[ 598.842362] env[68040]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=68040) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 598.842778] env[68040]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=68040) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 598.842819] env[68040]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=68040) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 598.843148] env[68040]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 598.930972] env[68040]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=68040) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 598.941008] env[68040]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=68040) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 599.076269] env[68040]: INFO nova.virt.driver [None req-cfe7e188-3c65-4a2a-b0b0-099a9c10d8dd None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 599.148413] env[68040]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 599.148568] env[68040]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 599.148654] env[68040]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=68040) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 602.241262] env[68040]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-122fdc65-d76a-4dc7-9f75-c3547305de1f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.260362] env[68040]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=68040) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 602.260362] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-1024f248-df61-4ca6-b933-045bc3099fc9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.296431] env[68040]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 80aac.
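A minimal sketch of the session setup traced above, using oslo.vmware's public API; the username, password, and tuning values below are placeholders, not values from this log:

    # Sketch of the VMwareAPISession creation logged above (oslo.vmware).
    # Only the vCenter host comes from the log; credentials are placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc1.osci.c.eu-de-1.cloud.sap',  # vCenter host from the log
        'administrator@vsphere.local',   # placeholder username
        'secret',                        # placeholder password
        10,                              # api_retry_count (illustrative)
        0.5)                             # task_poll_interval (illustrative)
    # _create_session() runs under the "oslo_vmware_api_lock" lock seen above,
    # builds the suds client against the /sdk endpoint, then issues
    # ServiceInstance.RetrieveServiceContent followed by SessionManager.Login.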
[ 602.296678] env[68040]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.148s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 602.297189] env[68040]: INFO nova.virt.vmwareapi.driver [None req-cfe7e188-3c65-4a2a-b0b0-099a9c10d8dd None None] VMware vCenter version: 7.0.3
[ 602.300598] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb866da0-fadb-49de-af67-e1150dcb1f85 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.318551] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f195d76-5f7f-488d-9430-0bebc3363e5e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.324700] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71599e1e-2ad1-45c4-89d6-6fc55f82e645 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.331799] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b6ebb6-6649-42fe-918a-93577a4a39d1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.344861] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85763d1e-c0ce-4353-b887-a7f40ceabe78 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.350925] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8adf017f-afc7-4a2c-a04e-97183c325d59 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.380986] env[68040]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-eb227bff-23ee-490f-98d3-27caf1281e71 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.386251] env[68040]: DEBUG nova.virt.vmwareapi.driver [None req-cfe7e188-3c65-4a2a-b0b0-099a9c10d8dd None None] Extension org.openstack.compute already exists. {{(pid=68040) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 602.388872] env[68040]: INFO nova.compute.provider_config [None req-cfe7e188-3c65-4a2a-b0b0-099a9c10d8dd None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
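The Acquiring/acquired/released lines here and throughout the rest of this log come from oslo.concurrency's lockutils; a minimal sketch of the pattern that emits them (the lock name is illustrative):

    # Sketch of the lockutils usage behind the "Acquiring lock ..." /
    # "Lock ... acquired ..." / "... released ..." DEBUG records in this log.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('example_lock')  # decorator form
    def refresh_cache():
        pass  # body runs while the lock is held

    # Equivalent context-manager form; entry and exit produce the same
    # DEBUG records, including the waited/held timings.
    with lockutils.lock('example_lock'):
        pass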
[ 602.407255] env[68040]: DEBUG nova.context [None req-cfe7e188-3c65-4a2a-b0b0-099a9c10d8dd None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),ea6bb363-cb14-4140-a1c3-f9ff2f43551b(cell1) {{(pid=68040) load_cells /opt/stack/nova/nova/context.py:464}}
[ 602.409098] env[68040]: DEBUG oslo_concurrency.lockutils [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 602.409322] env[68040]: DEBUG oslo_concurrency.lockutils [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 602.409957] env[68040]: DEBUG oslo_concurrency.lockutils [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 602.410380] env[68040]: DEBUG oslo_concurrency.lockutils [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] Acquiring lock "ea6bb363-cb14-4140-a1c3-f9ff2f43551b" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 602.410635] env[68040]: DEBUG oslo_concurrency.lockutils [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] Lock "ea6bb363-cb14-4140-a1c3-f9ff2f43551b" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 602.411633] env[68040]: DEBUG oslo_concurrency.lockutils [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] Lock "ea6bb363-cb14-4140-a1c3-f9ff2f43551b" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 602.431613] env[68040]: INFO dbcounter [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] Registered counter for database nova_cell0
[ 602.439641] env[68040]: INFO dbcounter [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] Registered counter for database nova_cell1
[ 602.442519] env[68040]: DEBUG oslo_db.sqlalchemy.engines [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68040) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 602.442872] env[68040]: DEBUG oslo_db.sqlalchemy.engines [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68040) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 602.447186] env[68040]: DEBUG dbcounter [-] [68040] Writer thread running {{(pid=68040) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 602.449919] env[68040]: ERROR nova.db.main.api [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 602.449919] env[68040]: result = function(*args, **kwargs)
[ 602.449919] env[68040]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 602.449919] env[68040]: return func(*args, **kwargs)
[ 602.449919] env[68040]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 602.449919] env[68040]: result = fn(*args, **kwargs)
[ 602.449919] env[68040]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 602.449919] env[68040]: return f(*args, **kwargs)
[ 602.449919] env[68040]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 602.449919] env[68040]: return db.service_get_minimum_version(context, binaries)
[ 602.449919] env[68040]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 602.449919] env[68040]: _check_db_access()
[ 602.449919] env[68040]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 602.449919] env[68040]: stacktrace = ''.join(traceback.format_stack())
[ 602.449919] env[68040]:
[ 602.450632] env[68040]: DEBUG dbcounter [-] [68040] Writer thread running {{(pid=68040) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 602.451225] env[68040]: ERROR nova.db.main.api [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 602.451225] env[68040]: result = function(*args, **kwargs)
[ 602.451225] env[68040]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 602.451225] env[68040]: return func(*args, **kwargs)
[ 602.451225] env[68040]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 602.451225] env[68040]: result = fn(*args, **kwargs)
[ 602.451225] env[68040]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 602.451225] env[68040]: return f(*args, **kwargs)
[ 602.451225] env[68040]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 602.451225] env[68040]: return db.service_get_minimum_version(context, binaries)
[ 602.451225] env[68040]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 602.451225] env[68040]: _check_db_access()
[ 602.451225] env[68040]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 602.451225] env[68040]: stacktrace = ''.join(traceback.format_stack())
[ 602.451225] env[68040]:
[ 602.451693] env[68040]: WARNING nova.objects.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 602.451723] env[68040]: WARNING nova.objects.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] Failed to get minimum service version for cell ea6bb363-cb14-4140-a1c3-f9ff2f43551b
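The two tracebacks above come from nova's guard that forbids direct main-database access from inside nova-compute (the service falls back gracefully, hence the WARNING records rather than a crash). A rough, illustrative sketch of such a guard; the names and exception type here are assumptions, not nova's actual internals:

    # Illustrative guard in the spirit of nova's _check_db_access(); this is
    # a sketch of the idea, not nova's real code.
    import logging
    import traceback

    LOG = logging.getLogger(__name__)
    DISALLOW_DB_ACCESS = True  # enabled for DB-less services like nova-compute

    def _check_db_access():
        if DISALLOW_DB_ACCESS:
            # Record where the forbidden call came from, then refuse it;
            # the caller catches this and falls back, as seen above.
            stacktrace = ''.join(traceback.format_stack())
            LOG.error('No DB access allowed in this service: %s', stacktrace)
            raise RuntimeError('direct database access is disabled here')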
[ 602.452133] env[68040]: DEBUG oslo_concurrency.lockutils [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] Acquiring lock "singleton_lock" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 602.452298] env[68040]: DEBUG oslo_concurrency.lockutils [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] Acquired lock "singleton_lock" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 602.452600] env[68040]: DEBUG oslo_concurrency.lockutils [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] Releasing lock "singleton_lock" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 602.452929] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] Full set of CONF: {{(pid=68040) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 602.453085] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ******************************************************************************** {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}}
[ 602.453216] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] Configuration options gathered from: {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}}
[ 602.453381] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}}
[ 602.453577] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}}
[ 602.453707] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ================================================================================ {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}}
[ 602.453916] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] allow_resize_to_same_host = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.454097] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] arq_binding_timeout = 300 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.454233] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] backdoor_port = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.454387] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] backdoor_socket = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.454566] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] block_device_allocate_retries = 60 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.454733] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] block_device_allocate_retries_interval = 3 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.454903] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cert = self.pem {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.455088] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.455261] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] compute_monitors = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.455428] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] config_dir = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.455603] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] config_drive_format = iso9660 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.455731] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.455893] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] config_source = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.456071] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] console_host = devstack {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.456242] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] control_exchange = nova {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.456405] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cpu_allocation_ratio = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.456565] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] daemon = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.456733] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] debug = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.456891] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] default_access_ip_network_name = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.457067] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] default_availability_zone = nova {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.457228] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] default_ephemeral_format = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.457385] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] default_green_pool_size = 1000 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.457617] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.457784] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] default_schedule_zone = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.457943] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] disk_allocation_ratio = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.458158] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] enable_new_services = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.458377] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] enabled_apis = ['osapi_compute'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.458512] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] enabled_ssl_apis = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.458672] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] flat_injected = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.458829] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] force_config_drive = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.458986] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] force_raw_images = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.459170] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] graceful_shutdown_timeout = 5 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.459330] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] heal_instance_info_cache_interval = 60 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.459545] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] host = cpu-1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.459723] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.459890] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] initial_disk_allocation_ratio = 1.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.460063] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] initial_ram_allocation_ratio = 1.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.460284] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.460450] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] instance_build_timeout = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.460609] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] instance_delete_interval = 300 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.460778] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] instance_format = [instance: %(uuid)s] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.460944] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] instance_name_template = instance-%08x {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.461117] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] instance_usage_audit = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.461297] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] instance_usage_audit_period = month {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.461458] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.461621] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] instances_path = /opt/stack/data/nova/instances {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.461785] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] internal_service_availability_zone = internal {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.461942] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] key = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.462115] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] live_migration_retry_count = 30 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.462282] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] log_config_append = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.462454] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.462614] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] log_dir = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.462778] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] log_file = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.462901] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] log_options = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.463065] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] log_rotate_interval = 1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.463274] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] log_rotate_interval_type = days {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.463433] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] log_rotation_type = none {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.463569] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.463697] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.463870] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.464049] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.464182] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.464378] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] long_rpc_timeout = 1800 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.464563] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] max_concurrent_builds = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.464723] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] max_concurrent_live_migrations = 1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.464882] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] max_concurrent_snapshots = 5 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.465049] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] max_local_block_devices = 3 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.465213] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] max_logfile_count = 30 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.465370] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] max_logfile_size_mb = 200 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.465530] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] maximum_instance_delete_attempts = 5 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.465696] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] metadata_listen = 0.0.0.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.465864] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] metadata_listen_port = 8775 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.466039] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] metadata_workers = 2 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.466203] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] migrate_max_retries = -1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.466371] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] mkisofs_cmd = genisoimage {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.466578] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] my_block_storage_ip = 10.180.1.21 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.466709] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] my_ip = 10.180.1.21 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.466871] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] network_allocate_retries = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.467056] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.467229] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] osapi_compute_listen = 0.0.0.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.467395] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] osapi_compute_listen_port = 8774 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.467563] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] osapi_compute_unique_server_name_scope = {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.467728] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] osapi_compute_workers = 2 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.467886] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] password_length = 12 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.468057] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] periodic_enable = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.468220] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] periodic_fuzzy_delay = 60 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.468386] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] pointer_model = usbtablet {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.468552] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] preallocate_images = none {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.468711] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] publish_errors = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.468840] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] pybasedir = /opt/stack/nova {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.468995] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ram_allocation_ratio = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.469167] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] rate_limit_burst = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.469329] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] rate_limit_except_level = CRITICAL {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.469492] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] rate_limit_interval = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.469646] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] reboot_timeout = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.469797] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] reclaim_instance_interval = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.469953] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] record = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.470129] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] reimage_timeout_per_gb = 60 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.470297] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] report_interval = 120 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.470458] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] rescue_timeout = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.470618] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] reserved_host_cpus = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.470775] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] reserved_host_disk_mb = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.470931] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] reserved_host_memory_mb = 512 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.471101] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] reserved_huge_pages = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.471262] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] resize_confirm_window = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.471424] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] resize_fs_using_block_device = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.471581] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] resume_guests_state_on_host_boot = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.471747] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.471907] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] rpc_response_timeout = 60 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.472076] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] run_external_periodic_tasks = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.472245] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] running_deleted_instance_action = reap {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.472406] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] running_deleted_instance_poll_interval = 1800 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.472562] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] running_deleted_instance_timeout = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.472719] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] scheduler_instance_sync_interval = 120 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.472884] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] service_down_time = 720 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.473061] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] servicegroup_driver = db {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.473222] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] shelved_offload_time = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.473410] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] shelved_poll_interval = 3600 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.473582] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] shutdown_timeout = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.473742] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] source_is_ipv6 = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.473899] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ssl_only = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.474161] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.474337] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] sync_power_state_interval = 600 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.474527] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] sync_power_state_pool_size = 1000 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.474703] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] syslog_log_facility = LOG_USER {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.474863] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] tempdir = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.475033] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] timeout_nbd = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.475205] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] transport_url = **** {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.475366] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] update_resources_interval = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.475528] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] use_cow_images = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.475685] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] use_eventlog = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.475841] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] use_journal = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.475997] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] use_json = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.476166] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] use_rootwrap_daemon = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.476322] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] use_stderr = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.476479] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] use_syslog = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.476633] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vcpu_pin_set = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.476797] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vif_plugging_is_fatal = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.476960] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vif_plugging_timeout = 300 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.477138] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] virt_mkfs = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.477303] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] volume_usage_poll_interval = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.477463] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] watch_log_file = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.477631] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] web = /usr/share/spice-html5 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}}
[ 602.477810] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_concurrency.disable_process_locking = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.478116] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.478299] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.478466] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.478637] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.478805] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.478970] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.479162] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.auth_strategy = keystone {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.479328] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.compute_link_prefix = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.479505] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.479675] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.dhcp_domain = novalocal {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.479843] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.enable_instance_password = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.480024] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.glance_link_prefix = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.480191] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.480365] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.480559] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.instance_list_per_project_cells = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.480731] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.list_records_by_skipping_down_cells = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.480895] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.local_metadata_per_cell = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.481074] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.max_limit = 1000 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.481252] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.metadata_cache_expiration = 15 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.481430] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.neutron_default_tenant_id = default {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.481596] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.use_neutron_default_nets = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.481773] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.481937] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.482115] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.482294] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.482468] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.vendordata_dynamic_targets = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.482632] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.vendordata_jsonfile_path = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.482814] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.483016] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.backend = dogpile.cache.memcached {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.483188] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.backend_argument = **** {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.483386] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.config_prefix = cache.oslo {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.483565] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.dead_timeout = 60.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.483732] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.debug_cache_backend = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.483893] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.enable_retry_client = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.484066] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.enable_socket_keepalive = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.484254] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.enabled = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.484477] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.enforce_fips_mode = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.484671] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.expiration_time = 600 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.484840] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.hashclient_retry_attempts = 2 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.485019] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.hashclient_retry_delay = 1.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.485189] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.memcache_dead_retry = 300 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.485349] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.memcache_password = **** {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.485517] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.485679] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.485843] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.memcache_pool_maxsize = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.486013] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.486184] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.memcache_sasl_enabled = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.486396] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.486625] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.memcache_socket_timeout = 1.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.486802] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.memcache_username = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.486971] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.proxies = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.487149] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.redis_password = **** {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.487326] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.redis_sentinel_service_name = mymaster {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.487526] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.487755] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.redis_server = localhost:6379 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.487934] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.redis_socket_timeout = 1.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.488110] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.redis_username = None {{(pid=68040) log_opt_values
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.488278] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.retry_attempts = 2 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.488447] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.retry_delay = 0.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.488612] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.socket_keepalive_count = 1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.488775] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.socket_keepalive_idle = 1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.488935] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.socket_keepalive_interval = 1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.489109] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.tls_allowed_ciphers = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.489272] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.tls_cafile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.489432] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.tls_certfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.489594] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.tls_enabled = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.489751] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cache.tls_keyfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.489923] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cinder.auth_section = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.490105] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cinder.auth_type = password {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.490269] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cinder.cafile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.490447] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cinder.catalog_info = volumev3::publicURL {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.490619] env[68040]: DEBUG oslo_service.service 
[None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cinder.certfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.490844] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cinder.collect_timing = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.491031] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cinder.cross_az_attach = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.491203] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cinder.debug = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.491393] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cinder.endpoint_template = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.491536] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cinder.http_retries = 3 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.491700] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cinder.insecure = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.491861] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cinder.keyfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.492047] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cinder.os_region_name = RegionOne {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.492220] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cinder.split_loggers = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.492383] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cinder.timeout = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.492557] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.492721] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] compute.cpu_dedicated_set = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.492880] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] compute.cpu_shared_set = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.493058] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] compute.image_type_exclude_list = [] {{(pid=68040) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.493230] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.493416] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] compute.max_concurrent_disk_ops = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.493585] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] compute.max_disk_devices_to_attach = -1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.493749] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.493921] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.494098] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] compute.resource_provider_association_refresh = 300 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.494269] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] compute.shutdown_retry_interval = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.494471] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.494658] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] conductor.workers = 2 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.494837] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] console.allowed_origins = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.494998] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] console.ssl_ciphers = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.495184] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] console.ssl_minimum_version = default {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.495354] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] consoleauth.enforce_session_timeout = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.495527] env[68040]: DEBUG oslo_service.service [None 
req-522a9e0d-161a-4820-a301-6563434dd955 None None] consoleauth.token_ttl = 600 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.495697] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cyborg.cafile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.495858] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cyborg.certfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.496032] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cyborg.collect_timing = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.496198] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cyborg.connect_retries = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.496359] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cyborg.connect_retry_delay = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.496518] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cyborg.endpoint_override = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.496679] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cyborg.insecure = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.496833] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cyborg.keyfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.496989] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cyborg.max_version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.497157] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cyborg.min_version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.497314] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cyborg.region_name = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.497476] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cyborg.retriable_status_codes = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.497632] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cyborg.service_name = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.497807] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cyborg.service_type = accelerator {{(pid=68040) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.497966] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cyborg.split_loggers = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.498136] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cyborg.status_code_retries = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.498298] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cyborg.status_code_retry_delay = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.498458] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cyborg.timeout = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.498634] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.498792] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] cyborg.version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.498970] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] database.backend = sqlalchemy {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.499152] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] database.connection = **** {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.499318] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] database.connection_debug = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.499486] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] database.connection_parameters = {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.499651] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] database.connection_recycle_time = 3600 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.499810] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] database.connection_trace = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.499971] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] database.db_inc_retry_interval = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.500145] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] database.db_max_retries = 20 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
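[editor's note] Every record in this dump is emitted by oslo.config's ConfigOpts.log_opt_values() — the cfg.py:2620 call site cited in each line — which walks every registered option group and logs one DEBUG line per option, masking options registered with secret=True as '****' (as seen above for cache.memcache_password, database.connection, and similar). A minimal, self-contained sketch of that mechanism, assuming only the public oslo.config API; the option names and defaults below are illustrative, not taken from this deployment:

import logging

from oslo_config import cfg

CONF = cfg.ConfigOpts()
CONF.register_opts(
    [
        cfg.StrOpt('backend', default='dogpile.cache.memcached'),
        cfg.StrOpt('memcache_password', secret=True),  # secret=True -> logged as ****
    ],
    group='cache',
)

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

CONF([])                                  # parse an (empty) command line
CONF.log_opt_values(LOG, logging.DEBUG)   # one DEBUG record per registered option

[end editor's note]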
[ 602.500307] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] database.db_max_retry_interval = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.500470] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] database.db_retry_interval = 1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.500630] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] database.max_overflow = 50 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.500789] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] database.max_pool_size = 5 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.500949] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] database.max_retries = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.501131] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.501290] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] database.mysql_wsrep_sync_wait = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.501448] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] database.pool_timeout = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.501605] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] database.retry_interval = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.501761] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] database.slave_connection = **** {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.501918] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] database.sqlite_synchronous = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.502090] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] database.use_db_reconnect = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.502303] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api_database.backend = sqlalchemy {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.502621] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api_database.connection = **** {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.502621] env[68040]: DEBUG oslo_service.service [None 
req-522a9e0d-161a-4820-a301-6563434dd955 None None] api_database.connection_debug = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.502769] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api_database.connection_parameters = {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.502951] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api_database.connection_recycle_time = 3600 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.503130] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api_database.connection_trace = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.503308] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api_database.db_inc_retry_interval = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.503486] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api_database.db_max_retries = 20 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.504408] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api_database.db_max_retry_interval = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.504408] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api_database.db_retry_interval = 1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.504408] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api_database.max_overflow = 50 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.504408] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api_database.max_pool_size = 5 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.504408] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api_database.max_retries = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.504574] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.504574] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.504731] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api_database.pool_timeout = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.504893] env[68040]: DEBUG oslo_service.service [None 
req-522a9e0d-161a-4820-a301-6563434dd955 None None] api_database.retry_interval = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.505058] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api_database.slave_connection = **** {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.505222] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] api_database.sqlite_synchronous = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.505398] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] devices.enabled_mdev_types = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.505589] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.505781] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ephemeral_storage_encryption.default_format = luks {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.505947] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ephemeral_storage_encryption.enabled = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.506123] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.506298] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.api_servers = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.506464] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.cafile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.506623] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.certfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.506786] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.collect_timing = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.506944] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.connect_retries = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.507116] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.connect_retry_delay = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.507280] env[68040]: DEBUG oslo_service.service [None 
req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.debug = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.507447] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.default_trusted_certificate_ids = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.507606] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.enable_certificate_validation = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.507766] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.enable_rbd_download = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.507922] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.endpoint_override = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.508099] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.insecure = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.508262] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.keyfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.508421] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.max_version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.508576] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.min_version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.508735] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.num_retries = 3 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.508902] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.rbd_ceph_conf = {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.509074] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.rbd_connect_timeout = 5 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.509245] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.rbd_pool = {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.509412] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.rbd_user = {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.509571] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.region_name = None {{(pid=68040) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.509727] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.retriable_status_codes = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.509882] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.service_name = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.510058] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.service_type = image {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.510223] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.split_loggers = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.510378] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.status_code_retries = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.510537] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.status_code_retry_delay = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.510692] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.timeout = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.510868] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.511040] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.verify_glance_signatures = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.511202] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] glance.version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.511368] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] guestfs.debug = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.511536] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] mks.enabled = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.511908] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.512113] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] image_cache.manager_interval = 2400 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
602.512291] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] image_cache.precache_concurrency = 1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.512468] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] image_cache.remove_unused_base_images = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.512638] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.512806] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.512981] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] image_cache.subdirectory_name = _base {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.513169] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.api_max_retries = 60 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.513399] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.api_retry_interval = 2 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.513520] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.auth_section = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.513683] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.auth_type = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.513841] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.cafile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.513998] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.certfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.514176] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.collect_timing = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.514353] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.conductor_group = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.514542] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.connect_retries = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.514709] env[68040]: DEBUG 
oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.connect_retry_delay = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.514870] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.endpoint_override = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.515044] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.insecure = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.515208] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.keyfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.515367] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.max_version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.515525] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.min_version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.515723] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.peer_list = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.515892] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.region_name = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.516061] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.retriable_status_codes = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.516232] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.serial_console_state_timeout = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.516393] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.service_name = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.516565] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.service_type = baremetal {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.516725] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.shard = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.516887] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.split_loggers = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.517053] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.status_code_retries = None {{(pid=68040) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.517215] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.status_code_retry_delay = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.517372] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.timeout = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.517552] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.517710] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ironic.version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.517889] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.518071] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] key_manager.fixed_key = **** {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.518260] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.518427] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican.barbican_api_version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.518581] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican.barbican_endpoint = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.518750] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican.barbican_endpoint_type = public {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.518911] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican.barbican_region_name = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.519080] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican.cafile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.519244] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican.certfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.519413] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican.collect_timing = False {{(pid=68040) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.519574] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican.insecure = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.519734] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican.keyfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.519898] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican.number_of_retries = 60 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.520070] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican.retry_delay = 1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.520236] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican.send_service_user_token = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.520400] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican.split_loggers = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.520556] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican.timeout = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.520734] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican.verify_ssl = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.520973] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican.verify_ssl_path = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.521183] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican_service_user.auth_section = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.521348] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican_service_user.auth_type = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.521510] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican_service_user.cafile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.521670] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican_service_user.certfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.521832] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican_service_user.collect_timing = False {{(pid=68040) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.521992] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican_service_user.insecure = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.522164] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican_service_user.keyfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.522325] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican_service_user.split_loggers = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.522483] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] barbican_service_user.timeout = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.522648] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vault.approle_role_id = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.522804] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vault.approle_secret_id = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.522961] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vault.cafile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.523129] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vault.certfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.523322] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vault.collect_timing = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.523497] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vault.insecure = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.523661] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vault.keyfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.523833] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vault.kv_mountpoint = secret {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.523994] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vault.kv_path = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.524175] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vault.kv_version = 2 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.524346] env[68040]: DEBUG 
oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vault.namespace = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.524523] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vault.root_token_id = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.524689] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vault.split_loggers = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.524847] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vault.ssl_ca_crt_file = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.525010] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vault.timeout = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.525182] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vault.use_ssl = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.525353] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.525525] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.auth_section = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.525687] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.auth_type = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.525847] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.cafile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.526009] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.certfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.526181] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.collect_timing = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.526342] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.connect_retries = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.526503] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.connect_retry_delay = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.526663] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.endpoint_override = None {{(pid=68040) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.526825] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.insecure = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.526982] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.keyfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.527151] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.max_version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.527309] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.min_version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.527471] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.region_name = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.527629] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.retriable_status_codes = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.527787] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.service_name = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.527959] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.service_type = identity {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.528134] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.split_loggers = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.528296] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.status_code_retries = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.528479] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.status_code_retry_delay = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.528655] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.timeout = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.528840] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.529009] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] keystone.version = None {{(pid=68040) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.529223] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.connection_uri = {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.529391] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.cpu_mode = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.529556] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.cpu_model_extra_flags = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.529726] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.cpu_models = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.529898] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.cpu_power_governor_high = performance {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.530084] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.cpu_power_governor_low = powersave {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.530251] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.cpu_power_management = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.530425] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.530591] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.device_detach_attempts = 8 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.530751] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.device_detach_timeout = 20 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.530915] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.disk_cachemodes = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.531084] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.disk_prefix = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.531279] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.enabled_perf_events = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.531454] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.file_backed_memory = 0 {{(pid=68040) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.531621] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.gid_maps = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.531782] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.hw_disk_discard = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.531940] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.hw_machine_type = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.532127] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.images_rbd_ceph_conf = {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.532297] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.532463] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.532634] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.images_rbd_glance_store_name = {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.532804] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.images_rbd_pool = rbd {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.532973] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.images_type = default {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.533146] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.images_volume_group = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.533341] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.inject_key = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.533513] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.inject_partition = -2 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.533678] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.inject_password = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.533840] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.iscsi_iface = None {{(pid=68040) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.534012] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.iser_use_multipath = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.534183] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.live_migration_bandwidth = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.534349] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.534513] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.live_migration_downtime = 500 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.534675] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.534836] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.534994] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.live_migration_inbound_addr = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.535168] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.535331] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.live_migration_permit_post_copy = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.535494] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.live_migration_scheme = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.535670] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.live_migration_timeout_action = abort {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.535835] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.live_migration_tunnelled = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.535999] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.live_migration_uri = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.536178] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] 
libvirt.live_migration_with_native_tls = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.536339] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.max_queues = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.536504] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.536747] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.536916] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.nfs_mount_options = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.537227] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.537404] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.537574] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.num_iser_scan_tries = 5 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.537737] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.num_memory_encrypted_guests = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.537901] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.538074] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.num_pcie_ports = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.538248] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.num_volume_scan_tries = 5 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.538418] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.pmem_namespaces = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.538581] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.quobyte_client_cfg = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.538861] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.539045] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.rbd_connect_timeout = 5 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.539218] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.539384] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.539548] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.rbd_secret_uuid = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.539708] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.rbd_user = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.539873] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.540054] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.remote_filesystem_transport = ssh {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.540223] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.rescue_image_id = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.540383] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.rescue_kernel_id = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.540544] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.rescue_ramdisk_id = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.540714] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.540871] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.rx_queue_size = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.541048] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.smbfs_mount_options = {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.541356] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.541532] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.snapshot_compression = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.541698] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.snapshot_image_format = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.541915] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.542094] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.sparse_logical_volumes = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.542263] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.swtpm_enabled = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.542438] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.swtpm_group = tss {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.542608] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.swtpm_user = tss {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.542777] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.sysinfo_serial = unique {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.542936] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.tb_cache_size = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.543104] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.tx_queue_size = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.543299] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.uid_maps = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.543471] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.use_virtio_for_bridges = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.543645] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.virt_type = kvm {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.543815] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.volume_clear = zero 
{{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.543979] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.volume_clear_size = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.544160] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.volume_use_multipath = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.544325] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.vzstorage_cache_path = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.544495] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.544664] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.vzstorage_mount_group = qemu {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.544829] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.vzstorage_mount_opts = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.544998] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.545290] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.545472] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.vzstorage_mount_user = stack {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.545658] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.545846] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.auth_section = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.546034] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.auth_type = password {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.546206] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.cafile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.546369] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.certfile = None 
{{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.546534] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.collect_timing = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.546694] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.connect_retries = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.546853] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.connect_retry_delay = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.547032] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.default_floating_pool = public {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.547196] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.endpoint_override = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.547364] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.extension_sync_interval = 600 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.547526] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.http_retries = 3 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.547687] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.insecure = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.547849] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.keyfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.548023] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.max_version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.548194] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.548359] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.min_version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.548526] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.ovs_bridge = br-int {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.548689] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.physnets = [] {{(pid=68040) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.548859] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.region_name = RegionOne {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.549028] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.retriable_status_codes = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.549205] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.service_metadata_proxy = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.549367] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.service_name = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.549536] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.service_type = network {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.549696] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.split_loggers = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.549854] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.status_code_retries = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.550017] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.status_code_retry_delay = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.550181] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.timeout = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.550361] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.550524] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] neutron.version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.550700] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] notifications.bdms_in_notifications = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.550869] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] notifications.default_level = INFO {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.551051] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] notifications.notification_format = unversioned {{(pid=68040) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.551240] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] notifications.notify_on_state_change = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.551432] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.551611] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] pci.alias = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.551783] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] pci.device_spec = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.551950] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] pci.report_in_placement = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.552137] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.auth_section = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.552312] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.auth_type = password {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.552479] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.552639] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.cafile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.552797] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.certfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.552960] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.collect_timing = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.553131] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.connect_retries = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.553326] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.connect_retry_delay = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.553496] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.default_domain_id = None {{(pid=68040) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.553655] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.default_domain_name = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.553840] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.domain_id = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.553971] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.domain_name = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.554146] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.endpoint_override = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.554312] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.insecure = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.554472] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.keyfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.554625] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.max_version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.554781] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.min_version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.554946] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.password = **** {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.555117] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.project_domain_id = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.555287] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.project_domain_name = Default {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.555454] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.project_id = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.555641] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.project_name = service {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.555824] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.region_name = RegionOne {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.555988] 
env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.retriable_status_codes = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.556159] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.service_name = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.556331] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.service_type = placement {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.556497] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.split_loggers = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.556656] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.status_code_retries = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.556821] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.status_code_retry_delay = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.556980] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.system_scope = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.557152] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.timeout = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.557312] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.trust_id = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.557471] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.user_domain_id = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.557637] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.user_domain_name = Default {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.557798] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.user_id = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.557969] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.username = placement {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.559167] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.559167] env[68040]: DEBUG oslo_service.service [None 
req-522a9e0d-161a-4820-a301-6563434dd955 None None] placement.version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.559167] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] quota.cores = 20 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.559167] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] quota.count_usage_from_placement = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.559167] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.559167] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] quota.injected_file_content_bytes = 10240 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.559374] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] quota.injected_file_path_length = 255 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.559374] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] quota.injected_files = 5 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.559503] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] quota.instances = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.559730] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] quota.key_pairs = 100 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.559929] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] quota.metadata_items = 128 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.560109] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] quota.ram = 51200 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.560293] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] quota.recheck_quota = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.560509] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] quota.server_group_members = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.560682] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] quota.server_groups = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.560860] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=68040) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.561040] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.561244] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] scheduler.image_metadata_prefilter = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.561438] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.561611] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] scheduler.max_attempts = 3 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.561777] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] scheduler.max_placement_results = 1000 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.561945] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.562126] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] scheduler.query_placement_for_image_type_support = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.562293] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.562474] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] scheduler.workers = 2 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.562656] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.562833] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.563020] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.563200] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.563393] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.563568] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.563740] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.563953] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.564117] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.host_subset_size = 1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.564307] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.564482] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.564652] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.564821] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.isolated_hosts = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.564987] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.isolated_images = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.565167] env[68040]: DEBUG oslo_service.service [None 
req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.565332] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.565508] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.565700] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.pci_in_placement = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.565875] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.566070] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.566249] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.566419] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.566583] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.566749] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.566912] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.track_instance_changes = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.567104] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
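The filter_scheduler options above drive host selection in two passes: every filter named in filter_scheduler.enabled_filters must accept a host for it to stay in the candidate pool, and the survivors are then ranked by the weighers scaled through the *_weight_multiplier values. A toy sketch of that filter-then-weigh flow (the classes and host dicts are invented stand-ins, not Nova's real interfaces):

    # Toy filter-then-weigh pass: hosts must pass every enabled filter,
    # then get sorted by weighted free RAM (cf. ram_weight_multiplier = 1.0).
    class ComputeFilter:
        def host_passes(self, host, spec):
            return host['enabled']

    class SameHostFilter:
        def host_passes(self, host, spec):
            wanted = spec.get('same_host')
            return not wanted or host['name'] in wanted

    def schedule(hosts, spec, filters, ram_weight_multiplier=1.0):
        survivors = [h for h in hosts
                     if all(f.host_passes(h, spec) for f in filters)]
        return sorted(survivors,
                      key=lambda h: ram_weight_multiplier * h['free_ram_mb'],
                      reverse=True)

    hosts = [{'name': 'cmp1', 'enabled': True, 'free_ram_mb': 2048},
             {'name': 'cmp2', 'enabled': False, 'free_ram_mb': 8192}]
    print(schedule(hosts, {}, [ComputeFilter(), SameHostFilter()]))
    # -> only cmp1 survives; cmp2 is dropped by ComputeFilter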
[ 602.567284] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] metrics.required = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.567452] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] metrics.weight_multiplier = 1.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.567617] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.567782] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] metrics.weight_setting = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.568130] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.568313] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] serial_console.enabled = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.568500] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] serial_console.port_range = 10000:20000 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.568675] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.568848] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.569029] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] serial_console.serialproxy_port = 6083 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.569205] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] service_user.auth_section = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.569383] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] service_user.auth_type = password {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.569548] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] service_user.cafile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.569711] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] service_user.certfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.569874] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] service_user.collect_timing = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.570049] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] service_user.insecure = False {{(pid=68040) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.570216] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] service_user.keyfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.570393] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] service_user.send_service_user_token = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.570559] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] service_user.split_loggers = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.570718] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] service_user.timeout = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.570888] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] spice.agent_enabled = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.571077] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] spice.enabled = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.571431] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.571638] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.571815] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] spice.html5proxy_port = 6082 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.571980] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] spice.image_compression = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.572159] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] spice.jpeg_compression = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.572324] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] spice.playback_compression = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.572502] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] spice.server_listen = 127.0.0.1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.572672] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=68040) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.572834] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] spice.streaming_mode = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.572994] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] spice.zlib_compression = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.573177] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] upgrade_levels.baseapi = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.573388] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] upgrade_levels.compute = auto {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.573563] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] upgrade_levels.conductor = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.573726] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] upgrade_levels.scheduler = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.573895] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vendordata_dynamic_auth.auth_section = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.574066] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vendordata_dynamic_auth.auth_type = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.574241] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vendordata_dynamic_auth.cafile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.574420] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vendordata_dynamic_auth.certfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.574587] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.574750] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vendordata_dynamic_auth.insecure = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.574910] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vendordata_dynamic_auth.keyfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.575085] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=68040) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.575248] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vendordata_dynamic_auth.timeout = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.575427] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.api_retry_count = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.575883] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.ca_file = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.575883] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.cache_prefix = devstack-image-cache {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.575951] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.cluster_name = testcl1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.576134] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.connection_pool_size = 10 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.576298] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.console_delay_seconds = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.576474] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.datastore_regex = ^datastore.* {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.576692] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.576869] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.host_password = **** {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.577050] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.host_port = 443 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.577230] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.host_username = administrator@vsphere.local {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.577412] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.insecure = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
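Within the vmware options here (the group continues below), vmware.datastore_regex = ^datastore.* restricts which datastores on cluster testcl1 the driver will place instances on. A small sketch of the implied name filtering; the datastore names are made up:

    import re

    # vmware.datastore_regex as dumped above, applied to datastore names
    # the driver would discover on the cluster.
    datastore_regex = re.compile(r'^datastore.*')
    discovered = ['datastore-1', 'datastore-nvme', 'swift-backing', 'iso-store']

    eligible = [name for name in discovered if datastore_regex.match(name)]
    print(eligible)  # ['datastore-1', 'datastore-nvme']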
[ 602.577575] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.integration_bridge = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.577741] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.maximum_objects = 100 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.577903] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.pbm_default_policy = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.578079] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.pbm_enabled = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.578248] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.pbm_wsdl_location = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.578423] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.578590] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.serial_port_proxy_uri = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.578742] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.serial_port_service_uri = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.578911] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.task_poll_interval = 0.5 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.579094] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.use_linked_clone = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.579269] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.vnc_keymap = en-us {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.579440] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.vnc_port = 5900 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.579603] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vmware.vnc_port_total = 10000 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.579791] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vnc.auth_schemes = ['none'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.579967] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vnc.enabled = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.580282] env[68040]: 
DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.580473] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.580648] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vnc.novncproxy_port = 6080 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.580827] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vnc.server_listen = 127.0.0.1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.581007] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.581199] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vnc.vencrypt_ca_certs = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.581377] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vnc.vencrypt_client_cert = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.581543] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vnc.vencrypt_client_key = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.581727] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.581891] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.disable_deep_image_inspection = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.582064] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.582234] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.582402] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.582563] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.disable_rootwrap = False {{(pid=68040) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.582724] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.enable_numa_live_migration = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.582887] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.583060] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.583226] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.583417] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.libvirt_disable_apic = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.583587] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.583752] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.583915] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.584090] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.584274] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.584464] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.584631] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.584795] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
602.584958] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.585140] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.585330] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.585504] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] wsgi.client_socket_timeout = 900 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.585672] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] wsgi.default_pool_size = 1000 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.585839] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] wsgi.keep_alive = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.586015] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] wsgi.max_header_line = 16384 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.586188] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] wsgi.secure_proxy_ssl_header = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.586351] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] wsgi.ssl_ca_file = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.586517] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] wsgi.ssl_cert_file = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.586680] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] wsgi.ssl_key_file = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.586847] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] wsgi.tcp_keepidle = 600 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.587033] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
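wsgi.wsgi_log_format just above is an old-style Python %-format template that Nova's eventlet-based WSGI server fills in once per request. How such a record renders, with invented field values:

    # Render the wsgi.wsgi_log_format template with made-up request data.
    fmt = ('%(client_ip)s "%(request_line)s" status: %(status_code)s '
           'len: %(body_length)s time: %(wall_seconds).7f')
    print(fmt % {'client_ip': '192.0.2.10',
                 'request_line': 'GET /v2.1/servers HTTP/1.1',
                 'status_code': 200,
                 'body_length': 1846,
                 'wall_seconds': 0.0312887})
    # -> 192.0.2.10 "GET /v2.1/servers HTTP/1.1" status: 200 len: 1846 time: 0.0312887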
[ 602.587210] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] zvm.ca_file = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.587373] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] zvm.cloud_connector_url = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.587671] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.587848] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] zvm.reachable_timeout = 300 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.588044] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_policy.enforce_new_defaults = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.588224] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_policy.enforce_scope = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.588403] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_policy.policy_default_rule = default {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.588584] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.588760] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_policy.policy_file = policy.yaml {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.588932] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.589109] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.589274] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.589437] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.589601] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.589770] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.589949] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.590141] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] profiler.connection_string = messaging:// {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.590313] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] profiler.enabled = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.590485] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] profiler.es_doc_type = notification {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.590650] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] profiler.es_scroll_size = 10000 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.590819] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] profiler.es_scroll_time = 2m {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.590982] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] profiler.filter_error_trace = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.591187] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] profiler.hmac_keys = **** {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.591376] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] profiler.sentinel_service_name = mymaster {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.591551] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] profiler.socket_timeout = 0.1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.591718] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] profiler.trace_requests = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.591883] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] profiler.trace_sqlalchemy = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.592078] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] profiler_jaeger.process_tags = {} {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.592250] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] 
profiler_jaeger.service_name_prefix = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.592417] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] profiler_otlp.service_name_prefix = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.592579] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] remote_debug.host = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.592739] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] remote_debug.port = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.592918] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.593097] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.593290] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.593468] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.593634] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.593795] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.593957] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.594134] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.594322] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.594514] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.594681] env[68040]: 
DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.594854] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.595071] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.595260] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.595439] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.595610] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.595776] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.595953] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.596227] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.596315] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.596471] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.596639] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.596802] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.596968] env[68040]: DEBUG oslo_service.service [None 
req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.597145] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.597312] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.597478] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.597637] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.597805] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.597971] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.ssl = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.598159] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.598335] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.598502] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.598674] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.598845] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.ssl_version = {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.599014] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.599206] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=68040) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.599374] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_notifications.retry = -1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.599559] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.599733] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_messaging_notifications.transport_url = **** {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.599903] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.auth_section = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.600081] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.auth_type = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.600246] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.cafile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.600408] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.certfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.600571] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.collect_timing = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.600730] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.connect_retries = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.600889] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.connect_retry_delay = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.601055] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.endpoint_id = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.601271] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.endpoint_override = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.601462] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.insecure = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
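The oslo_messaging_notifications options above (driver ['messagingv2'], topics ['notifications'], retry = -1 for unlimited retries, and a transport_url masked as ****) determine where this service publishes its notifications. A hedged sketch of emitting one notification with those settings; the transport URL, publisher id, and payload below are invented:

    import oslo_messaging
    from oslo_config import cfg

    CONF = cfg.CONF
    # URL is illustrative; the real one is masked as **** in the dump above.
    transport = oslo_messaging.get_notification_transport(
        CONF, url='rabbit://stackrabbit:secret@devstack:5672/')
    notifier = oslo_messaging.Notifier(transport,
                                       publisher_id='nova-compute.devstack',
                                       driver='messagingv2',
                                       topics=['notifications'],
                                       retry=-1)
    notifier.info({}, 'compute.instance.create.end',
                  {'uuid': '00000000-0000-0000-0000-000000000000'})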
[ 602.601627] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.keyfile = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.601787] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.max_version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.601947] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.min_version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.602120] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.region_name = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.602285] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.retriable_status_codes = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.602446] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.service_name = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.602605] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.service_type = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.602766] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.split_loggers = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.602926] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.status_code_retries = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.603175] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.status_code_retry_delay = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.603377] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.timeout = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.603547] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.valid_interfaces = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.603709] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_limit.version = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.603879] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_reports.file_event_handler = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 602.604057] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=68040) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.604225] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] oslo_reports.log_dir = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.604455] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.604636] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.604801] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.604971] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.605154] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.605317] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.605492] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.605653] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vif_plug_ovs_privileged.group = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.605813] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.605979] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.606156] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.606318] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] vif_plug_ovs_privileged.user = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.606490] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] os_vif_linux_bridge.flat_interface = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.606670] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.606844] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.607027] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.607204] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.607370] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.607538] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.607700] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.607878] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.608061] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] os_vif_ovs.isolate_vif = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.608239] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.608408] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.608579] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.608750] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] os_vif_ovs.ovsdb_interface = native {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.608912] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] os_vif_ovs.per_port_bridge = False {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.609091] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] os_brick.lock_path = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.609260] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.609427] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.609597] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] privsep_osbrick.capabilities = [21] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.609831] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] privsep_osbrick.group = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.610035] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] privsep_osbrick.helper_command = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.610214] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.610383] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.610548] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] privsep_osbrick.user = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.610725] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.610885] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] nova_sys_admin.group = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.611054] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] nova_sys_admin.helper_command = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.611228] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.611389] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.611549] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] nova_sys_admin.user = None {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}}
[ 602.611680] env[68040]: DEBUG oslo_service.service [None req-522a9e0d-161a-4820-a301-6563434dd955 None None] ******************************************************************************** {{(pid=68040) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}}
[ 602.612110] env[68040]: INFO nova.service [-] Starting compute node (version 0.0.1)
[ 602.622214] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Getting list of instances from cluster (obj){
[ 602.622214] env[68040]: value = "domain-c8"
[ 602.622214] env[68040]: _type = "ClusterComputeResource"
[ 602.622214] env[68040]: } {{(pid=68040) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 602.623463] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a2f69a-9c55-4723-82d1-6cb8e1c9036f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.632408] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Got total of 0 instances {{(pid=68040) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 602.632932] env[68040]: WARNING nova.virt.vmwareapi.driver [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list.
[ 602.633408] env[68040]: INFO nova.virt.node [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Generated node identity 22db6f73-b3da-436a-bf40-9c8c240b2e44
[ 602.633631] env[68040]: INFO nova.virt.node [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Wrote node identity 22db6f73-b3da-436a-bf40-9c8c240b2e44 to /opt/stack/data/n-cpu-1/compute_id
[ 602.645721] env[68040]: WARNING nova.compute.manager [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Compute nodes ['22db6f73-b3da-436a-bf40-9c8c240b2e44'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning.
[ 602.679867] env[68040]: INFO nova.compute.manager [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host
[ 602.700886] env[68040]: WARNING nova.compute.manager [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found.
[ 602.701396] env[68040]: DEBUG oslo_concurrency.lockutils [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 602.701647] env[68040]: DEBUG oslo_concurrency.lockutils [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 602.701802] env[68040]: DEBUG oslo_concurrency.lockutils [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 602.701959] env[68040]: DEBUG nova.compute.resource_tracker [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 602.703036] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1704abfe-fc5c-4a3f-b812-660eae9517a5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.711266] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61204c40-a676-49c7-b6a8-7e35167dd9fd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.725015] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1398d388-8f7b-49f6-b723-884cbe088ddf {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.731062] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce58d482-5c3c-403d-9bf2-5507c340c03b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.760749] env[68040]: DEBUG nova.compute.resource_tracker [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180991MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 602.760853] env[68040]: DEBUG oslo_concurrency.lockutils [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 602.761053] env[68040]: DEBUG oslo_concurrency.lockutils [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 602.773386] env[68040]: WARNING nova.compute.resource_tracker [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] No compute node record for cpu-1:22db6f73-b3da-436a-bf40-9c8c240b2e44: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 22db6f73-b3da-436a-bf40-9c8c240b2e44 could not be found.
[ 602.786521] env[68040]: INFO nova.compute.resource_tracker [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 22db6f73-b3da-436a-bf40-9c8c240b2e44
[ 602.839051] env[68040]: DEBUG nova.compute.resource_tracker [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 602.839240] env[68040]: DEBUG nova.compute.resource_tracker [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 602.937469] env[68040]: INFO nova.scheduler.client.report [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] [req-105b310f-3dd9-4ddc-ac4c-10de4eb2ae7b] Created resource provider record via placement API for resource provider with UUID 22db6f73-b3da-436a-bf40-9c8c240b2e44 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28.
[ 602.955210] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc4d06f-a16e-4c5f-8864-70cf4552af69 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.962631] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c747d55-5312-4fc9-b86d-714b094037c6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 602.993909] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4c8e13-bd7e-4496-9404-0616d191f1bc {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 603.001490] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4101a6-f5ba-4ceb-aa5f-b055ab74df96 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 603.015301] env[68040]: DEBUG nova.compute.provider_tree [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Updating inventory in ProviderTree for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 603.056229] env[68040]: DEBUG nova.scheduler.client.report [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Updated inventory for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}}
[ 603.056554] env[68040]: DEBUG nova.compute.provider_tree [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Updating resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 generation from 0 to 1 during operation: update_inventory {{(pid=68040) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}}
[ 603.056706] env[68040]: DEBUG nova.compute.provider_tree [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Updating inventory in ProviderTree for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}}
[ 603.103604] env[68040]: DEBUG nova.compute.provider_tree [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Updating resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 generation from 1 to 2 during operation: update_traits {{(pid=68040) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}}
[ 603.122009] env[68040]: DEBUG nova.compute.resource_tracker [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 603.122342] env[68040]: DEBUG oslo_concurrency.lockutils [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.361s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 603.122530] env[68040]: DEBUG nova.service [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Creating RPC server for service compute {{(pid=68040) start /opt/stack/nova/nova/service.py:182}}
[ 603.136565] env[68040]: DEBUG nova.service [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] Join ServiceGroup membership for this service compute {{(pid=68040) start /opt/stack/nova/nova/service.py:199}}
[ 603.136755] env[68040]: DEBUG nova.servicegroup.drivers.db [None req-40fb4793-2856-4fb2-a23a-8700e38c7a6e None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=68040) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}}
[ 611.138998] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 611.149224] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Getting list of instances from cluster (obj){
[ 611.149224] env[68040]: value = "domain-c8"
[ 611.149224] env[68040]: _type = "ClusterComputeResource"
[ 611.149224] env[68040]: } {{(pid=68040) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 611.150384] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9043f28-66e2-42ad-908d-8e8ab7db76e1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 611.159105] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Got total of 0 instances {{(pid=68040) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 611.159318] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 611.159632] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Getting list of instances from cluster (obj){
[ 611.159632] env[68040]: value = "domain-c8"
[ 611.159632] env[68040]: _type = "ClusterComputeResource"
[ 611.159632] env[68040]: } {{(pid=68040) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}}
[ 611.160637] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e635e8-d05a-421f-825a-5899e1ca6e36 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 611.167899] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Got total of 0 instances {{(pid=68040) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
[ 612.448395] env[68040]: DEBUG dbcounter [-] [68040] Writing DB stats nova_cell0:SELECT=1 {{(pid=68040) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}}
[ 612.450975] env[68040]: DEBUG dbcounter [-] [68040] Writing DB stats nova_cell1:SELECT=1 {{(pid=68040) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}}
[ 641.894031] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Acquiring lock "467ffaac-0414-4bed-af2c-d0939d90ba79" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 641.894031] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Lock "467ffaac-0414-4bed-af2c-d0939d90ba79" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 641.917551] env[68040]: DEBUG nova.compute.manager [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 642.063319] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 642.063575] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 642.065416] env[68040]: INFO nova.compute.claims [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 642.214781] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b339bfd0-5d71-4394-95c7-30d84b6efe8b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 642.223831] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae8b710-9c61-47f4-93e3-f3cb35997a3b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 642.261295] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed8c8b7f-28e8-4748-af1b-a8bba2a3b9fe {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 642.267607] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af07e69-0ee8-457a-acba-d81d51719106 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 642.282306] env[68040]: DEBUG nova.compute.provider_tree [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 642.302093] env[68040]: DEBUG nova.scheduler.client.report [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 642.325501] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.261s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 642.329138] env[68040]: DEBUG nova.compute.manager [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 642.406174] env[68040]: DEBUG nova.compute.utils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 642.415022] env[68040]: DEBUG nova.compute.manager [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 642.415022] env[68040]: DEBUG nova.network.neutron [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 642.443185] env[68040]: DEBUG nova.compute.manager [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 642.554472] env[68040]: DEBUG nova.compute.manager [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Start spawning the instance on the hypervisor. {{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 644.504837] env[68040]: DEBUG nova.virt.hardware [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 644.505162] env[68040]: DEBUG nova.virt.hardware [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 644.505270] env[68040]: DEBUG nova.virt.hardware [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 644.505430] env[68040]: DEBUG nova.virt.hardware [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 644.505576] env[68040]: DEBUG nova.virt.hardware [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 644.505729] env[68040]: DEBUG nova.virt.hardware [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 644.505962] env[68040]: DEBUG nova.virt.hardware [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 644.508870] env[68040]: DEBUG nova.virt.hardware [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 644.509329] env[68040]: DEBUG nova.virt.hardware [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 644.509514] env[68040]: DEBUG nova.virt.hardware [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 644.509692] env[68040]: DEBUG nova.virt.hardware [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 644.510665] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc1374c-592c-448c-b37b-4ce37891bbac {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 644.524015] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f614b509-d847-4f5f-b1cf-2628e995613a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 644.541439] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786c2e74-7387-4351-a0ff-517acfda8522 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 644.706239] env[68040]: DEBUG nova.policy [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '05a43c590f004d79bde8601cd894c835', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '73fdb6a62e9d4d3bb225f296541f3572', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}}
[ 645.248575] env[68040]: DEBUG nova.network.neutron [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Successfully created port: 745bf624-5c57-41fe-8a1e-1dbabc1f4c13 {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 647.056163] env[68040]: DEBUG oslo_concurrency.lockutils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Acquiring lock "7374c1a5-1b4f-4026-b885-bf0eb12a850e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 647.056163] env[68040]: DEBUG oslo_concurrency.lockutils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Lock "7374c1a5-1b4f-4026-b885-bf0eb12a850e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 647.079499] env[68040]: DEBUG nova.compute.manager [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 647.149784] env[68040]: DEBUG oslo_concurrency.lockutils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 647.150083] env[68040]: DEBUG oslo_concurrency.lockutils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 647.151956] env[68040]: INFO nova.compute.claims [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 647.286328] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33992618-597b-4636-887e-5a1bd9c1c7c3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 647.293959] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eedab452-5e2d-413e-9151-e6f533d1ed46 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 647.336287] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c3ec00-8611-4504-b9a7-c7a1298a95c2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 647.344763] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40fea045-4a81-40fe-b9be-0ba22c75a172 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 647.361502] env[68040]: DEBUG nova.compute.provider_tree [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 647.374492] env[68040]: DEBUG nova.scheduler.client.report [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 647.397023] env[68040]: DEBUG oslo_concurrency.lockutils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.246s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 647.398939] env[68040]: DEBUG nova.compute.manager [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 647.450327] env[68040]: DEBUG nova.compute.utils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 647.451302] env[68040]: DEBUG nova.compute.manager [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 647.451793] env[68040]: DEBUG nova.network.neutron [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 647.478553] env[68040]: DEBUG nova.compute.manager [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 647.579813] env[68040]: DEBUG nova.compute.manager [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Start spawning the instance on the hypervisor. {{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 647.613540] env[68040]: DEBUG nova.virt.hardware [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 647.614029] env[68040]: DEBUG nova.virt.hardware [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 647.614029] env[68040]: DEBUG nova.virt.hardware [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 647.614201] env[68040]: DEBUG nova.virt.hardware [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 647.614351] env[68040]: DEBUG nova.virt.hardware [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 647.614522] env[68040]: DEBUG nova.virt.hardware [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 647.614740] env[68040]: DEBUG nova.virt.hardware [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 647.614862] env[68040]: DEBUG nova.virt.hardware [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 647.615211] env[68040]: DEBUG nova.virt.hardware [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 647.615291] env[68040]: DEBUG nova.virt.hardware [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 647.615458] env[68040]: DEBUG nova.virt.hardware [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 647.616357] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3750a14-ad04-426b-a2b6-3cf2091230f0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 647.625493] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc99ca61-ee1b-43e1-bfe7-4052aa7a9ceb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 647.673569] env[68040]: DEBUG nova.policy [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b9bc5b5d24ec4c3da3a9bbb7f74b26c2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bba47d6853164b709160fe935e827841', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}}
[ 647.979090] env[68040]: DEBUG nova.network.neutron [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Successfully updated port: 745bf624-5c57-41fe-8a1e-1dbabc1f4c13 {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 647.999312] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Acquiring lock "refresh_cache-467ffaac-0414-4bed-af2c-d0939d90ba79" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 648.001548] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Acquired lock "refresh_cache-467ffaac-0414-4bed-af2c-d0939d90ba79" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 648.001548] env[68040]: DEBUG nova.network.neutron [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 648.111835] env[68040]: DEBUG nova.network.neutron [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 648.926930] env[68040]: DEBUG nova.network.neutron [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Updating instance_info_cache with network_info: [{"id": "745bf624-5c57-41fe-8a1e-1dbabc1f4c13", "address": "fa:16:3e:b0:18:2b", "network": {"id": "ca1a84af-ab33-497c-8767-fd4463c076be", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.47", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0770d674a39c40089de0aade9440b370", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap745bf624-5c", "ovs_interfaceid": "745bf624-5c57-41fe-8a1e-1dbabc1f4c13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 648.928920] env[68040]: DEBUG nova.network.neutron [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Successfully created port: ff42878f-c30e-4cd9-af66-1ac3a9ad0f6e {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 648.945347] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Releasing lock "refresh_cache-467ffaac-0414-4bed-af2c-d0939d90ba79" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 648.945897] env[68040]: DEBUG nova.compute.manager [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Instance network_info: |[{"id": "745bf624-5c57-41fe-8a1e-1dbabc1f4c13", "address": "fa:16:3e:b0:18:2b", "network": {"id": "ca1a84af-ab33-497c-8767-fd4463c076be", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.47", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0770d674a39c40089de0aade9440b370", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap745bf624-5c", "ovs_interfaceid": "745bf624-5c57-41fe-8a1e-1dbabc1f4c13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 648.947863] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:18:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '745bf624-5c57-41fe-8a1e-1dbabc1f4c13', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 648.961439] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 648.962922] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f0cb28e-cd2d-434e-bda9-96510f62e1af {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 648.976568] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Created folder: OpenStack in parent group-v4.
[ 648.976774] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Creating folder: Project (73fdb6a62e9d4d3bb225f296541f3572). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 648.977363] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ae1aafb5-d964-4a14-9768-b465d969fd45 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 648.991083] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Created folder: Project (73fdb6a62e9d4d3bb225f296541f3572) in parent group-v639956.
[ 648.991083] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Creating folder: Instances. Parent ref: group-v639957. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 648.991083] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dd3f2e60-5a2a-4b7a-97fa-b8f81c4296c5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 649.000416] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Created folder: Instances in parent group-v639957.
[ 649.001398] env[68040]: DEBUG oslo.service.loopingcall [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 649.001626] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 649.001846] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f2da967-e5e4-4a84-878a-ca5ad78dd412 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 649.027034] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 649.027034] env[68040]: value = "task-3200143"
[ 649.027034] env[68040]: _type = "Task"
[ 649.027034] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 649.037560] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200143, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 649.394411] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Acquiring lock "4cc61343-486f-466c-9881-1a6856c82748" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 649.394702] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Lock "4cc61343-486f-466c-9881-1a6856c82748" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 649.411854] env[68040]: DEBUG nova.compute.manager [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 649.534448] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 649.535614] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 649.538019] env[68040]: INFO nova.compute.claims [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 649.549902] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200143, 'name': CreateVM_Task, 'duration_secs': 0.335517} completed successfully.
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.553055] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 649.579620] env[68040]: DEBUG oslo_vmware.service [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-935d4407-e428-48be-9e76-65ef8acc24d8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.586273] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.586715] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.587793] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 649.588205] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5877b1c-4cec-4cf6-bdf0-12e7cd0d020a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.594492] env[68040]: DEBUG oslo_vmware.api [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Waiting for the task: (returnval){ [ 649.594492] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5218aa38-09dd-2720-754b-01a281303225" [ 649.594492] env[68040]: _type = "Task" [ 649.594492] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.603393] env[68040]: DEBUG oslo_vmware.api [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5218aa38-09dd-2720-754b-01a281303225, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.803015] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed26006-9d8a-4f9a-ada0-e39ba3e3c071 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.814285] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2de707-975c-415d-a4a9-87f4b0b8ee96 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.856519] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65743e0-20a1-41ea-a714-133c2e20eace {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.862438] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Acquiring lock "8ae39d32-abb3-4e3e-8d2d-003eda60b136" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.862660] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Lock "8ae39d32-abb3-4e3e-8d2d-003eda60b136" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.870422] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0517eb1-8b89-4b00-ba55-901545252c5d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.886496] env[68040]: DEBUG nova.compute.provider_tree [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.890259] env[68040]: DEBUG nova.compute.manager [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Starting instance... 
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 649.901246] env[68040]: DEBUG nova.scheduler.client.report [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 649.922087] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.387s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.922601] env[68040]: DEBUG nova.compute.manager [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 649.979475] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 649.979857] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.981798] env[68040]: INFO nova.compute.claims [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 649.988959] env[68040]: DEBUG nova.compute.utils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 649.990966] env[68040]: DEBUG nova.compute.manager [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Allocating IP information in the background. 
{{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 649.991496] env[68040]: DEBUG nova.network.neutron [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 650.008844] env[68040]: DEBUG nova.compute.manager [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 650.049128] env[68040]: DEBUG oslo_concurrency.lockutils [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Acquiring lock "f23e3529-19a6-4562-ae9b-591d1a452385" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.049128] env[68040]: DEBUG oslo_concurrency.lockutils [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Lock "f23e3529-19a6-4562-ae9b-591d1a452385" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.064235] env[68040]: DEBUG nova.compute.manager [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Starting instance... 
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 650.108796] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.109096] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 650.109292] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 650.109462] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.109825] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 650.112581] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa044740-4770-4b1e-887a-4f1c4318f752 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.123355] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 650.123558] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 650.124411] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f75c8120-54e3-4723-8a4f-4cfd589ee049 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.146534] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26fb1517-f97f-4cc6-a6ab-3e6879362758 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.154930] env[68040]: DEBUG nova.compute.manager [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Start spawning the instance on the hypervisor. {{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 650.162273] env[68040]: DEBUG oslo_vmware.api [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Waiting for the task: (returnval){ [ 650.162273] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]527a2670-5157-2078-2eed-70c4656e8733" [ 650.162273] env[68040]: _type = "Task" [ 650.162273] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.173956] env[68040]: DEBUG oslo_vmware.api [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]527a2670-5157-2078-2eed-70c4656e8733, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.185125] env[68040]: DEBUG oslo_concurrency.lockutils [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.203043] env[68040]: DEBUG nova.virt.hardware [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 650.204880] env[68040]: DEBUG nova.virt.hardware [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 650.205083] env[68040]: DEBUG nova.virt.hardware [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 650.205283] env[68040]: DEBUG nova.virt.hardware [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 650.205429] env[68040]: DEBUG nova.virt.hardware [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 650.205586] env[68040]: DEBUG nova.virt.hardware [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 650.207071] env[68040]: DEBUG nova.virt.hardware [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 650.207071] env[68040]: DEBUG nova.virt.hardware [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 650.207071] env[68040]: DEBUG nova.virt.hardware [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 650.207797] env[68040]: DEBUG nova.virt.hardware [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 650.208080] env[68040]: DEBUG nova.virt.hardware [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 650.208962] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45bbe62f-812e-4a29-bbcf-f241dc24e5c7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.221758] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ae6b5c3-fba1-444c-968d-a0612db693ad {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.261761] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f4c39d-797c-4d8e-b34e-e5aa5424e598 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.272312] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f23ebba0-5d26-4514-a1ed-b9c602279711 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.307628] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2730a524-4b57-4331-8005-7f273203067a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.314255] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9368e4b9-9699-4fae-ac9c-861631907da6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.330576] env[68040]: DEBUG nova.compute.provider_tree [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 650.347353] env[68040]: DEBUG nova.scheduler.client.report [None 
req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 650.371735] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.388s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.371735] env[68040]: DEBUG nova.compute.manager [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 650.374111] env[68040]: DEBUG oslo_concurrency.lockutils [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.189s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.376415] env[68040]: INFO nova.compute.claims [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 650.445282] env[68040]: DEBUG nova.compute.utils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 650.449604] env[68040]: DEBUG nova.compute.manager [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Not allocating networking since 'none' was specified. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 650.468337] env[68040]: DEBUG nova.compute.manager [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Start building block device mappings for instance. 
{{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 650.536090] env[68040]: DEBUG nova.policy [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1be55549fc3c4f3982e5a889223ad530', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bc19e6356662486d9945bf13a865e002', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 650.594886] env[68040]: DEBUG nova.compute.manager [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Start spawning the instance on the hypervisor. {{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 650.607310] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a4bb6d1-25a1-4879-a9ab-001285d354e0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.615018] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c40e29-c702-440f-b467-6cc53eff2e0d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.657091] env[68040]: DEBUG nova.virt.hardware [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 650.657339] env[68040]: DEBUG nova.virt.hardware [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 650.657493] env[68040]: DEBUG nova.virt.hardware [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 650.657667] env[68040]: DEBUG nova.virt.hardware [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 
tempest-ServerShowV257Test-1660784702-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 650.657814] env[68040]: DEBUG nova.virt.hardware [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 650.657961] env[68040]: DEBUG nova.virt.hardware [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 650.658180] env[68040]: DEBUG nova.virt.hardware [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 650.658332] env[68040]: DEBUG nova.virt.hardware [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 650.658496] env[68040]: DEBUG nova.virt.hardware [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 650.658656] env[68040]: DEBUG nova.virt.hardware [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 650.658823] env[68040]: DEBUG nova.virt.hardware [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 650.659709] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324ffaf9-2c52-4793-9062-1579bd7574e2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.662931] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4941f0c-9f3e-4246-94ce-6fca22e07134 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.679930] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-913e5887-3fcf-4496-9f47-6b80282a3448 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.689029] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da690524-7637-4c60-92b2-1047ae791f09 {{(pid=68040) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.703835] env[68040]: DEBUG nova.compute.provider_tree [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 650.705480] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 650.705688] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Creating directory with path [datastore2] vmware_temp/c0df2a27-8967-46c4-be78-8179c02f0386/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 650.706125] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa127fdb-40ea-44ca-9c46-f4e7116b1125 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.715601] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Instance VIF info [] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 650.721020] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Creating folder: Project (2fbce555afd54ba19614991d67eae25f). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 650.722296] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-77c3b41f-f88d-40ee-87cc-04b71adb5bff {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.724521] env[68040]: DEBUG nova.scheduler.client.report [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 650.737130] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Created folder: Project (2fbce555afd54ba19614991d67eae25f) in parent group-v639956. 
[ 650.737325] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Creating folder: Instances. Parent ref: group-v639960. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 650.737543] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-481c3451-c8e5-401e-8ed4-0a5a5742757f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.741943] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Created directory with path [datastore2] vmware_temp/c0df2a27-8967-46c4-be78-8179c02f0386/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 650.742243] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Fetch image to [datastore2] vmware_temp/c0df2a27-8967-46c4-be78-8179c02f0386/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 650.742409] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/c0df2a27-8967-46c4-be78-8179c02f0386/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 650.745089] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4acb6a0-8de1-40d2-b7af-7bad1c77883b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.748431] env[68040]: DEBUG oslo_concurrency.lockutils [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.374s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.748906] env[68040]: DEBUG nova.compute.manager [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 650.751307] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Created folder: Instances in parent group-v639960. 
[ 650.751532] env[68040]: DEBUG oslo.service.loopingcall [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 650.752534] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 650.752743] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-07a3b747-7852-47a6-876a-89ac3b390a9c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.768672] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7214c82c-5f9a-486f-8c5d-95d2eade9924 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.772378] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 650.772378] env[68040]: value = "task-3200146" [ 650.772378] env[68040]: _type = "Task" [ 650.772378] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.781544] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1ebb79-7b05-4e90-9117-c766746f5637 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.788682] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200146, 'name': CreateVM_Task} progress is 5%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.793403] env[68040]: DEBUG nova.compute.utils [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 650.819782] env[68040]: DEBUG nova.compute.manager [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 650.820022] env[68040]: DEBUG nova.network.neutron [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 650.823771] env[68040]: DEBUG nova.compute.manager [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Start building block device mappings for instance. 
{{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 650.826669] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4873695b-36d5-4ff7-86f6-f5be653bdf99 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.833722] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-cc703853-4ae7-4847-8203-5ebae3b12ff3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.864050] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 650.915398] env[68040]: DEBUG nova.compute.manager [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Start spawning the instance on the hypervisor. {{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 650.939536] env[68040]: DEBUG oslo_vmware.rw_handles [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c0df2a27-8967-46c4-be78-8179c02f0386/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 650.943540] env[68040]: DEBUG nova.virt.hardware [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 650.943809] env[68040]: DEBUG nova.virt.hardware [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 650.943965] env[68040]: DEBUG nova.virt.hardware [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 650.944178] env[68040]: DEBUG nova.virt.hardware [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 650.944326] env[68040]: DEBUG nova.virt.hardware [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 650.944472] env[68040]: DEBUG nova.virt.hardware [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 650.944683] env[68040]: DEBUG nova.virt.hardware [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 650.944898] env[68040]: DEBUG nova.virt.hardware [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 650.945641] 
env[68040]: DEBUG nova.virt.hardware [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 650.946054] env[68040]: DEBUG nova.virt.hardware [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 650.946270] env[68040]: DEBUG nova.virt.hardware [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 650.947847] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c028900-7fd0-48b4-a7d4-e6c2a33b0fd0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.019749] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe78e964-8e71-435d-b2e0-7257b83b7489 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.025563] env[68040]: DEBUG oslo_vmware.rw_handles [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 651.025563] env[68040]: DEBUG oslo_vmware.rw_handles [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c0df2a27-8967-46c4-be78-8179c02f0386/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 651.229723] env[68040]: DEBUG nova.compute.manager [req-67f7519a-5c05-4bde-895d-c9982062e077 req-1929238e-19cb-4843-972a-4ac0ca0bf632 service nova] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Received event network-vif-plugged-745bf624-5c57-41fe-8a1e-1dbabc1f4c13 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 651.229935] env[68040]: DEBUG oslo_concurrency.lockutils [req-67f7519a-5c05-4bde-895d-c9982062e077 req-1929238e-19cb-4843-972a-4ac0ca0bf632 service nova] Acquiring lock "467ffaac-0414-4bed-af2c-d0939d90ba79-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 651.230145] env[68040]: DEBUG oslo_concurrency.lockutils [req-67f7519a-5c05-4bde-895d-c9982062e077 req-1929238e-19cb-4843-972a-4ac0ca0bf632 service nova] Lock "467ffaac-0414-4bed-af2c-d0939d90ba79-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.230640] env[68040]: DEBUG oslo_concurrency.lockutils [req-67f7519a-5c05-4bde-895d-c9982062e077 req-1929238e-19cb-4843-972a-4ac0ca0bf632 service nova] Lock "467ffaac-0414-4bed-af2c-d0939d90ba79-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.230640] env[68040]: DEBUG nova.compute.manager [req-67f7519a-5c05-4bde-895d-c9982062e077 req-1929238e-19cb-4843-972a-4ac0ca0bf632 service nova] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] No waiting events found dispatching network-vif-plugged-745bf624-5c57-41fe-8a1e-1dbabc1f4c13 {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 651.230640] env[68040]: WARNING nova.compute.manager [req-67f7519a-5c05-4bde-895d-c9982062e077 req-1929238e-19cb-4843-972a-4ac0ca0bf632 service nova] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Received unexpected event network-vif-plugged-745bf624-5c57-41fe-8a1e-1dbabc1f4c13 for instance with vm_state building and task_state spawning. [ 651.247052] env[68040]: DEBUG nova.policy [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '45e5f1a53c43425288927ba1194efd7f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7c339e0a904a43e99c5fe8bb6da01be1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 651.286564] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200146, 'name': CreateVM_Task, 'duration_secs': 0.311097} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.286676] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 651.287085] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 651.287252] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.287558] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 651.287803] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9bd0055-580c-4b37-8169-50ffb7513b0c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.292514] env[68040]: DEBUG oslo_vmware.api [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Waiting for the task: (returnval){ [ 651.292514] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52882fe9-4afc-a83f-7bdc-b1b81b4efb19" [ 651.292514] env[68040]: _type = "Task" [ 651.292514] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.305249] env[68040]: DEBUG oslo_vmware.api [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52882fe9-4afc-a83f-7bdc-b1b81b4efb19, 'name': SearchDatastore_Task} progress is 0%. 
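
[annotation] The "Waiting for the task ... to complete" / "progress is 0%" pairs above come from oslo.vmware's task poller, which re-reads task state until it reaches a terminal state. A rough stand-in for that loop, illustrative only — get_task_info below is a hypothetical callable, and the real oslo_vmware.api implementation differs:

    # Illustrative polling loop in the spirit of the wait_for_task /
    # _poll_task lines above; not the oslo.vmware implementation.
    import time

    def wait_for_task(get_task_info, interval=0.5):
        while True:
            info = get_task_info()  # assumed: object with .state/.result/.error_msg
            if info.state == "success":      # -> "completed successfully" record
                return info.result
            if info.state == "error":
                raise RuntimeError(info.error_msg)
            time.sleep(interval)             # -> "progress is N%." records
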
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.808383] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 651.808681] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 651.808887] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.111461] env[68040]: DEBUG nova.network.neutron [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Successfully updated port: ff42878f-c30e-4cd9-af66-1ac3a9ad0f6e {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 652.132672] env[68040]: DEBUG oslo_concurrency.lockutils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Acquiring lock "refresh_cache-7374c1a5-1b4f-4026-b885-bf0eb12a850e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 652.132851] env[68040]: DEBUG oslo_concurrency.lockutils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Acquired lock "refresh_cache-7374c1a5-1b4f-4026-b885-bf0eb12a850e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.133013] env[68040]: DEBUG nova.network.neutron [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 652.228651] env[68040]: DEBUG nova.network.neutron [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Successfully created port: 63eb2087-bb53-4d97-bceb-bfee13ccc78c {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 652.391462] env[68040]: DEBUG oslo_concurrency.lockutils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Acquiring lock "17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.392458] env[68040]: DEBUG oslo_concurrency.lockutils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Lock "17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.410956] env[68040]: DEBUG nova.compute.manager [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 652.519700] env[68040]: DEBUG oslo_concurrency.lockutils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.520105] env[68040]: DEBUG oslo_concurrency.lockutils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.521611] env[68040]: INFO nova.compute.claims [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 652.571157] env[68040]: DEBUG nova.network.neutron [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Instance cache missing network info. 
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 652.743201] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9027a1c3-56a3-4bfe-ac4e-73aab07fd2ba {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.750692] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6939129-538c-479f-9f72-937551949089 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.784675] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d882e095-3a22-4cd3-9475-68871ad5a287 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.794198] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e567331-945b-4977-86d7-41dff01852ac {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.816508] env[68040]: DEBUG nova.compute.provider_tree [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 652.832025] env[68040]: DEBUG nova.scheduler.client.report [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 652.855910] env[68040]: DEBUG oslo_concurrency.lockutils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.336s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.859178] env[68040]: DEBUG nova.compute.manager [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Start building networks asynchronously for instance. 
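
[annotation] The inventory dict in the scheduler report-client record above is the standard Placement inventory shape; the consumable amount of each resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. Worked out for the exact values logged (a quick sketch, not Nova code):

    # Capacity implied by the logged inventory for provider 22db6f73-....
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 126},
    }
    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: {capacity:g} consumable, <= {inv['max_unit']} per allocation")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
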
{{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 652.916711] env[68040]: DEBUG nova.compute.utils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 652.918891] env[68040]: DEBUG nova.compute.manager [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 652.919110] env[68040]: DEBUG nova.network.neutron [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 652.935752] env[68040]: DEBUG nova.compute.manager [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 653.001403] env[68040]: DEBUG nova.network.neutron [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Successfully created port: 57121e1d-ce6b-4afe-b723-0d0e8e6f6051 {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 653.060974] env[68040]: DEBUG nova.compute.manager [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Start spawning the instance on the hypervisor. 
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 653.094427] env[68040]: DEBUG nova.virt.hardware [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 653.094661] env[68040]: DEBUG nova.virt.hardware [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 653.094848] env[68040]: DEBUG nova.virt.hardware [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 653.095995] env[68040]: DEBUG nova.virt.hardware [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 653.095995] env[68040]: DEBUG nova.virt.hardware [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 653.095995] env[68040]: DEBUG nova.virt.hardware [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 653.095995] env[68040]: DEBUG nova.virt.hardware [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 653.095995] env[68040]: DEBUG nova.virt.hardware [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 653.096211] env[68040]: DEBUG nova.virt.hardware [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 
tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 653.098349] env[68040]: DEBUG nova.virt.hardware [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 653.098498] env[68040]: DEBUG nova.virt.hardware [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 653.099713] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273058ae-d4a4-48d2-85cb-1f056b6aadf5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.113801] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f741f078-5e2b-44e5-b093-aeebb640b78a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.237826] env[68040]: DEBUG nova.policy [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3296df228bec48ceb77348dd04127d6b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c523995a5ebd4b739ec89615990c8a71', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 653.421140] env[68040]: DEBUG nova.network.neutron [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Updating instance_info_cache with network_info: [{"id": "ff42878f-c30e-4cd9-af66-1ac3a9ad0f6e", "address": "fa:16:3e:7a:6e:d1", "network": {"id": "88187e54-b99b-4328-bc20-35f889cb52f5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-402678145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bba47d6853164b709160fe935e827841", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "471f65a5-21ea-45e3-a722-4e204ed65673", "external-id": "nsx-vlan-transportzone-139", "segmentation_id": 139, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff42878f-c3", "ovs_interfaceid": "ff42878f-c30e-4cd9-af66-1ac3a9ad0f6e", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.443466] env[68040]: DEBUG oslo_concurrency.lockutils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Releasing lock "refresh_cache-7374c1a5-1b4f-4026-b885-bf0eb12a850e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 653.443466] env[68040]: DEBUG nova.compute.manager [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Instance network_info: |[{"id": "ff42878f-c30e-4cd9-af66-1ac3a9ad0f6e", "address": "fa:16:3e:7a:6e:d1", "network": {"id": "88187e54-b99b-4328-bc20-35f889cb52f5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-402678145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bba47d6853164b709160fe935e827841", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "471f65a5-21ea-45e3-a722-4e204ed65673", "external-id": "nsx-vlan-transportzone-139", "segmentation_id": 139, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff42878f-c3", "ovs_interfaceid": "ff42878f-c30e-4cd9-af66-1ac3a9ad0f6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 653.444166] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:6e:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '471f65a5-21ea-45e3-a722-4e204ed65673', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff42878f-c30e-4cd9-af66-1ac3a9ad0f6e', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 653.453186] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Creating folder: Project (bba47d6853164b709160fe935e827841). Parent ref: group-v639956. 
{{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 653.454313] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5ea9a639-7450-45e7-9af9-8d2504a544f1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.468389] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Created folder: Project (bba47d6853164b709160fe935e827841) in parent group-v639956. [ 653.468706] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Creating folder: Instances. Parent ref: group-v639963. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 653.469556] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6feb4740-2ae9-4a08-838a-cb6b9e9c92b0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.479076] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Created folder: Instances in parent group-v639963. [ 653.479341] env[68040]: DEBUG oslo.service.loopingcall [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 653.479950] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 653.479950] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cd278f08-eddd-4d52-8d5f-7c84833f5203 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.502168] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 653.502168] env[68040]: value = "task-3200149" [ 653.502168] env[68040]: _type = "Task" [ 653.502168] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.510477] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200149, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.013166] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200149, 'name': CreateVM_Task, 'duration_secs': 0.377058} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.013401] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 654.014996] env[68040]: DEBUG oslo_concurrency.lockutils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.014996] env[68040]: DEBUG oslo_concurrency.lockutils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.014996] env[68040]: DEBUG oslo_concurrency.lockutils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 654.014996] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44619d33-f3dc-4e31-bc67-bb6713128ede {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.020511] env[68040]: DEBUG oslo_vmware.api [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Waiting for the task: (returnval){ [ 654.020511] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]527b1720-5528-a08c-1a33-2b9a6804fdf0" [ 654.020511] env[68040]: _type = "Task" [ 654.020511] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.029873] env[68040]: DEBUG oslo_vmware.api [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]527b1720-5528-a08c-1a33-2b9a6804fdf0, 'name': SearchDatastore_Task} progress is 0%. 
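
[annotation] The Acquiring/Acquired/Releasing triplets around "[datastore2] devstack-image-cache_base/..." above are oslo.concurrency's lockutils serialising access to one image-cache entry per process. The pattern, reduced to a sketch — lockutils.lock() is the real context manager, but the function and its body are placeholders:

    from oslo_concurrency import lockutils

    def process_cached_image(image_id):
        # One lock per cache entry; the DEBUG Acquiring/Acquired/Releasing
        # records above bracket exactly this kind of critical section.
        with lockutils.lock(f"[datastore2] devstack-image-cache_base/{image_id}"):
            ...  # inspect or populate the cache entry for image_id
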
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.538971] env[68040]: DEBUG oslo_concurrency.lockutils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.538971] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 654.538971] env[68040]: DEBUG oslo_concurrency.lockutils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 654.690256] env[68040]: DEBUG nova.compute.manager [req-981dad1c-099f-4067-b261-eef6ad20c71a req-8db8ea63-d441-4779-a57b-ec1f18b12688 service nova] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Received event network-vif-plugged-ff42878f-c30e-4cd9-af66-1ac3a9ad0f6e {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 654.690256] env[68040]: DEBUG oslo_concurrency.lockutils [req-981dad1c-099f-4067-b261-eef6ad20c71a req-8db8ea63-d441-4779-a57b-ec1f18b12688 service nova] Acquiring lock "7374c1a5-1b4f-4026-b885-bf0eb12a850e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 654.690256] env[68040]: DEBUG oslo_concurrency.lockutils [req-981dad1c-099f-4067-b261-eef6ad20c71a req-8db8ea63-d441-4779-a57b-ec1f18b12688 service nova] Lock "7374c1a5-1b4f-4026-b885-bf0eb12a850e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 654.690256] env[68040]: DEBUG oslo_concurrency.lockutils [req-981dad1c-099f-4067-b261-eef6ad20c71a req-8db8ea63-d441-4779-a57b-ec1f18b12688 service nova] Lock "7374c1a5-1b4f-4026-b885-bf0eb12a850e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 654.690552] env[68040]: DEBUG nova.compute.manager [req-981dad1c-099f-4067-b261-eef6ad20c71a req-8db8ea63-d441-4779-a57b-ec1f18b12688 service nova] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] No waiting events found dispatching network-vif-plugged-ff42878f-c30e-4cd9-af66-1ac3a9ad0f6e {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 654.690552] env[68040]: WARNING nova.compute.manager [req-981dad1c-099f-4067-b261-eef6ad20c71a req-8db8ea63-d441-4779-a57b-ec1f18b12688 service nova] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Received unexpected event 
network-vif-plugged-ff42878f-c30e-4cd9-af66-1ac3a9ad0f6e for instance with vm_state building and task_state spawning. [ 655.118741] env[68040]: DEBUG nova.network.neutron [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Successfully created port: 669990b8-9fa9-4d05-bcd1-ec726196c14f {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 655.254834] env[68040]: DEBUG nova.compute.manager [req-476ba089-ab3b-40e8-b7e9-b2241bda0c8f req-718a12d6-9197-44da-a5cf-cc8f1b7cafbf service nova] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Received event network-changed-745bf624-5c57-41fe-8a1e-1dbabc1f4c13 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 655.255092] env[68040]: DEBUG nova.compute.manager [req-476ba089-ab3b-40e8-b7e9-b2241bda0c8f req-718a12d6-9197-44da-a5cf-cc8f1b7cafbf service nova] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Refreshing instance network info cache due to event network-changed-745bf624-5c57-41fe-8a1e-1dbabc1f4c13. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 655.255314] env[68040]: DEBUG oslo_concurrency.lockutils [req-476ba089-ab3b-40e8-b7e9-b2241bda0c8f req-718a12d6-9197-44da-a5cf-cc8f1b7cafbf service nova] Acquiring lock "refresh_cache-467ffaac-0414-4bed-af2c-d0939d90ba79" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.256083] env[68040]: DEBUG oslo_concurrency.lockutils [req-476ba089-ab3b-40e8-b7e9-b2241bda0c8f req-718a12d6-9197-44da-a5cf-cc8f1b7cafbf service nova] Acquired lock "refresh_cache-467ffaac-0414-4bed-af2c-d0939d90ba79" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.256083] env[68040]: DEBUG nova.network.neutron [req-476ba089-ab3b-40e8-b7e9-b2241bda0c8f req-718a12d6-9197-44da-a5cf-cc8f1b7cafbf service nova] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Refreshing network info cache for port 745bf624-5c57-41fe-8a1e-1dbabc1f4c13 {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 655.315107] env[68040]: DEBUG nova.network.neutron [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Successfully updated port: 63eb2087-bb53-4d97-bceb-bfee13ccc78c {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 655.334032] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Acquiring lock "refresh_cache-4cc61343-486f-466c-9881-1a6856c82748" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.334032] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Acquired lock "refresh_cache-4cc61343-486f-466c-9881-1a6856c82748" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.334032] env[68040]: DEBUG nova.network.neutron [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b 
tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 655.447261] env[68040]: DEBUG nova.network.neutron [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 655.770242] env[68040]: DEBUG oslo_concurrency.lockutils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquiring lock "3411cba3-71c9-4334-bc79-4e322f4231f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.771020] env[68040]: DEBUG oslo_concurrency.lockutils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Lock "3411cba3-71c9-4334-bc79-4e322f4231f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.786389] env[68040]: DEBUG nova.compute.manager [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Starting instance... 
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 655.820121] env[68040]: DEBUG nova.network.neutron [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Successfully updated port: 57121e1d-ce6b-4afe-b723-0d0e8e6f6051 {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 655.834601] env[68040]: DEBUG oslo_concurrency.lockutils [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Acquiring lock "refresh_cache-f23e3529-19a6-4562-ae9b-591d1a452385" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 655.834959] env[68040]: DEBUG oslo_concurrency.lockutils [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Acquired lock "refresh_cache-f23e3529-19a6-4562-ae9b-591d1a452385" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 655.835369] env[68040]: DEBUG nova.network.neutron [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 655.873712] env[68040]: DEBUG oslo_concurrency.lockutils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.874034] env[68040]: DEBUG oslo_concurrency.lockutils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.875526] env[68040]: INFO nova.compute.claims [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 655.988497] env[68040]: DEBUG nova.network.neutron [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Instance cache missing network info. 
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 656.114183] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e156289-02d5-4a92-8ace-477ebd217448 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.122973] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3d2504-960f-474d-856b-053fbffefa61 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.158446] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4f9f82-c165-4d54-90f6-ee89e12ac321 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.168138] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1069da01-5122-44f3-b47e-11e06b4d8c9d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.187454] env[68040]: DEBUG nova.compute.provider_tree [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.199193] env[68040]: DEBUG nova.scheduler.client.report [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 656.229475] env[68040]: DEBUG oslo_concurrency.lockutils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.355s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 656.229978] env[68040]: DEBUG nova.compute.manager [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Start building networks asynchronously for instance. 
{{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 656.295493] env[68040]: DEBUG nova.compute.utils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 656.298319] env[68040]: DEBUG nova.compute.manager [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 656.298319] env[68040]: DEBUG nova.network.neutron [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 656.306979] env[68040]: DEBUG nova.compute.manager [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 656.335681] env[68040]: DEBUG nova.network.neutron [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Updating instance_info_cache with network_info: [{"id": "63eb2087-bb53-4d97-bceb-bfee13ccc78c", "address": "fa:16:3e:9a:d6:09", "network": {"id": "ca1a84af-ab33-497c-8767-fd4463c076be", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.230", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0770d674a39c40089de0aade9440b370", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63eb2087-bb", "ovs_interfaceid": "63eb2087-bb53-4d97-bceb-bfee13ccc78c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.355428] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Releasing lock "refresh_cache-4cc61343-486f-466c-9881-1a6856c82748" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.355428] env[68040]: DEBUG nova.compute.manager [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 
tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Instance network_info: |[{"id": "63eb2087-bb53-4d97-bceb-bfee13ccc78c", "address": "fa:16:3e:9a:d6:09", "network": {"id": "ca1a84af-ab33-497c-8767-fd4463c076be", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.230", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0770d674a39c40089de0aade9440b370", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63eb2087-bb", "ovs_interfaceid": "63eb2087-bb53-4d97-bceb-bfee13ccc78c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 656.361549] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:d6:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '63eb2087-bb53-4d97-bceb-bfee13ccc78c', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 656.374352] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Creating folder: Project (bc19e6356662486d9945bf13a865e002). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 656.375895] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-da4d749f-f6ef-4f94-a06b-5e221b7f45d2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.395018] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Created folder: Project (bc19e6356662486d9945bf13a865e002) in parent group-v639956. [ 656.395369] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Creating folder: Instances. Parent ref: group-v639966. 
{{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 656.395703] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7debbe0-488b-4b12-80bd-ea4e15aaddb6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.408415] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Created folder: Instances in parent group-v639966. [ 656.408415] env[68040]: DEBUG oslo.service.loopingcall [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 656.410315] env[68040]: DEBUG nova.compute.manager [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Start spawning the instance on the hypervisor. {{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 656.412813] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 656.413436] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1dc6b123-e609-4dac-b57b-c2b7cb003508 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.442175] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 656.442175] env[68040]: value = "task-3200152" [ 656.442175] env[68040]: _type = "Task" [ 656.442175] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.449096] env[68040]: DEBUG nova.virt.hardware [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 656.449337] env[68040]: DEBUG nova.virt.hardware [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 656.449492] env[68040]: DEBUG nova.virt.hardware [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 656.449671] env[68040]: DEBUG nova.virt.hardware [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 656.449815] env[68040]: DEBUG nova.virt.hardware [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 656.449959] env[68040]: DEBUG nova.virt.hardware [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 656.450183] env[68040]: DEBUG nova.virt.hardware [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 656.450340] env[68040]: DEBUG nova.virt.hardware [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 656.450502] env[68040]: DEBUG nova.virt.hardware [None 
req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 656.451113] env[68040]: DEBUG nova.virt.hardware [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 656.451113] env[68040]: DEBUG nova.virt.hardware [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 656.452080] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d37f0db-a0db-4252-9647-6b691e6e6799 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.457784] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200152, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.462672] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5412ebb5-b205-4396-8469-30f97b5e62ae {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.517183] env[68040]: DEBUG nova.policy [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '697d3700fa134bc1af625faea8349c1c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dd6e3befaffa492c8eb487ac87c42785', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 656.619111] env[68040]: DEBUG nova.network.neutron [req-476ba089-ab3b-40e8-b7e9-b2241bda0c8f req-718a12d6-9197-44da-a5cf-cc8f1b7cafbf service nova] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Updated VIF entry in instance network info cache for port 745bf624-5c57-41fe-8a1e-1dbabc1f4c13. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 656.619757] env[68040]: DEBUG nova.network.neutron [req-476ba089-ab3b-40e8-b7e9-b2241bda0c8f req-718a12d6-9197-44da-a5cf-cc8f1b7cafbf service nova] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Updating instance_info_cache with network_info: [{"id": "745bf624-5c57-41fe-8a1e-1dbabc1f4c13", "address": "fa:16:3e:b0:18:2b", "network": {"id": "ca1a84af-ab33-497c-8767-fd4463c076be", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.47", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0770d674a39c40089de0aade9440b370", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap745bf624-5c", "ovs_interfaceid": "745bf624-5c57-41fe-8a1e-1dbabc1f4c13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.637165] env[68040]: DEBUG oslo_concurrency.lockutils [req-476ba089-ab3b-40e8-b7e9-b2241bda0c8f req-718a12d6-9197-44da-a5cf-cc8f1b7cafbf service nova] Releasing lock "refresh_cache-467ffaac-0414-4bed-af2c-d0939d90ba79" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.773384] env[68040]: DEBUG nova.network.neutron [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Updating instance_info_cache with network_info: [{"id": "57121e1d-ce6b-4afe-b723-0d0e8e6f6051", "address": "fa:16:3e:02:f3:87", "network": {"id": "75323b07-43f8-4219-ae49-9d83d10b9ae8", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-971332642-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c339e0a904a43e99c5fe8bb6da01be1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57121e1d-ce", "ovs_interfaceid": "57121e1d-ce6b-4afe-b723-0d0e8e6f6051", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 656.789150] env[68040]: DEBUG oslo_concurrency.lockutils [None 
req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Releasing lock "refresh_cache-f23e3529-19a6-4562-ae9b-591d1a452385" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 656.789462] env[68040]: DEBUG nova.compute.manager [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Instance network_info: |[{"id": "57121e1d-ce6b-4afe-b723-0d0e8e6f6051", "address": "fa:16:3e:02:f3:87", "network": {"id": "75323b07-43f8-4219-ae49-9d83d10b9ae8", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-971332642-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c339e0a904a43e99c5fe8bb6da01be1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57121e1d-ce", "ovs_interfaceid": "57121e1d-ce6b-4afe-b723-0d0e8e6f6051", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 656.789873] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:f3:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d054505-89d3-49c5-8b38-5da917a42c49', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '57121e1d-ce6b-4afe-b723-0d0e8e6f6051', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 656.798324] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Creating folder: Project (7c339e0a904a43e99c5fe8bb6da01be1). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 656.799671] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-064bc4f7-b9d8-41f3-8147-e5d061a216e8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.811160] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Created folder: Project (7c339e0a904a43e99c5fe8bb6da01be1) in parent group-v639956. 
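A pattern worth noting in the entries above: every vSphere mutation the driver issues here (Folder.CreateFolder, Folder.CreateVM_Task) returns a vCenter Task reference, and the "Waiting for the task: (returnval){...}", "progress is 0%", and "completed successfully" lines are wait_for_task/_poll_task polling that reference to completion. The following is a minimal sketch of that polling loop, assuming a hypothetical session.get_task_info() helper and poll interval; it illustrates the pattern visible in these log lines, not the actual oslo.vmware implementation.

    import time

    POLL_INTERVAL = 0.5  # assumed poll interval; oslo.vmware drives this from a looping call

    def wait_for_task_sketch(session, task_ref):
        """Poll a vCenter task until it finishes, mirroring the
        'Task: {...} progress is N%' -> 'completed successfully' entries."""
        while True:
            info = session.get_task_info(task_ref)  # hypothetical helper, not the real API
            if info.state == 'success':
                return info.result  # e.g. the created folder or VM reference
            if info.state == 'error':
                raise RuntimeError(info.error)
            # each pass through this branch corresponds to one 'progress is N%' DEBUG line
            time.sleep(POLL_INTERVAL)
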
[ 656.811302] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Creating folder: Instances. Parent ref: group-v639969. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 656.811538] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef1365d0-c9c2-4ca0-a575-b2770ff87c91 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.822259] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Created folder: Instances in parent group-v639969. [ 656.822533] env[68040]: DEBUG oslo.service.loopingcall [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 656.823168] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 656.823168] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4fbaa5b0-210d-4946-ad7e-1c3cb6abb0e8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.847498] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 656.847498] env[68040]: value = "task-3200155" [ 656.847498] env[68040]: _type = "Task" [ 656.847498] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.858239] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200155, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.962813] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200152, 'name': CreateVM_Task, 'duration_secs': 0.309437} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.962813] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 656.963346] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 656.963545] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.963908] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 656.964863] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-768e9bf0-bc14-4e8c-8016-830345296ee0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.970169] env[68040]: DEBUG oslo_vmware.api [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Waiting for the task: (returnval){ [ 656.970169] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5258a4b2-d02a-7fe4-36ed-c2a2cb447452" [ 656.970169] env[68040]: _type = "Task" [ 656.970169] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.981270] env[68040]: DEBUG oslo_vmware.api [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5258a4b2-d02a-7fe4-36ed-c2a2cb447452, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.359035] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200155, 'name': CreateVM_Task, 'duration_secs': 0.358689} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.360043] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 657.360043] env[68040]: DEBUG oslo_concurrency.lockutils [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.485566] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 657.485847] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 657.486120] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.486353] env[68040]: DEBUG oslo_concurrency.lockutils [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.486698] env[68040]: DEBUG oslo_concurrency.lockutils [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 657.486998] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0f3af00-9f7d-4f93-a479-bfc24f00222c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.496607] env[68040]: DEBUG oslo_vmware.api [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Waiting for the task: (returnval){ [ 657.496607] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5293b102-3db9-fc08-2b06-4e76784b29e8" [ 657.496607] env[68040]: _type = "Task" [ 657.496607] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.511888] env[68040]: DEBUG oslo_concurrency.lockutils [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 657.512224] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 657.512465] env[68040]: DEBUG oslo_concurrency.lockutils [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.764841] env[68040]: DEBUG nova.network.neutron [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Successfully updated port: 669990b8-9fa9-4d05-bcd1-ec726196c14f {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 657.784978] env[68040]: DEBUG oslo_concurrency.lockutils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Acquiring lock "refresh_cache-17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.784978] env[68040]: DEBUG oslo_concurrency.lockutils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Acquired lock "refresh_cache-17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.784978] env[68040]: DEBUG nova.network.neutron [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 658.007964] env[68040]: DEBUG nova.network.neutron [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Successfully created port: 45a2eb6d-a522-4966-a710-81ffcaf1c461 {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 658.094759] env[68040]: DEBUG nova.network.neutron [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Instance cache missing network info. 
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 658.853757] env[68040]: DEBUG nova.compute.manager [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Received event network-changed-ff42878f-c30e-4cd9-af66-1ac3a9ad0f6e {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 658.853757] env[68040]: DEBUG nova.compute.manager [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Refreshing instance network info cache due to event network-changed-ff42878f-c30e-4cd9-af66-1ac3a9ad0f6e. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 658.853757] env[68040]: DEBUG oslo_concurrency.lockutils [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] Acquiring lock "refresh_cache-7374c1a5-1b4f-4026-b885-bf0eb12a850e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.853757] env[68040]: DEBUG oslo_concurrency.lockutils [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] Acquired lock "refresh_cache-7374c1a5-1b4f-4026-b885-bf0eb12a850e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.853757] env[68040]: DEBUG nova.network.neutron [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Refreshing network info cache for port ff42878f-c30e-4cd9-af66-1ac3a9ad0f6e {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 658.889029] env[68040]: DEBUG nova.network.neutron [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Updating instance_info_cache with network_info: [{"id": "669990b8-9fa9-4d05-bcd1-ec726196c14f", "address": "fa:16:3e:93:51:82", "network": {"id": "ca1a84af-ab33-497c-8767-fd4463c076be", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.87", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0770d674a39c40089de0aade9440b370", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap669990b8-9f", "ovs_interfaceid": "669990b8-9fa9-4d05-bcd1-ec726196c14f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.912330] env[68040]: DEBUG oslo_concurrency.lockutils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 
tempest-MigrationsAdminTest-1851392124-project-member] Releasing lock "refresh_cache-17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.912715] env[68040]: DEBUG nova.compute.manager [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Instance network_info: |[{"id": "669990b8-9fa9-4d05-bcd1-ec726196c14f", "address": "fa:16:3e:93:51:82", "network": {"id": "ca1a84af-ab33-497c-8767-fd4463c076be", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.87", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0770d674a39c40089de0aade9440b370", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap669990b8-9f", "ovs_interfaceid": "669990b8-9fa9-4d05-bcd1-ec726196c14f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 658.913411] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:51:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '669990b8-9fa9-4d05-bcd1-ec726196c14f', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 658.922119] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Creating folder: Project (c523995a5ebd4b739ec89615990c8a71). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 658.923015] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e21dd77-8c86-4a8a-8794-2633f62488df {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.937277] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Created folder: Project (c523995a5ebd4b739ec89615990c8a71) in parent group-v639956. [ 658.937511] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Creating folder: Instances. Parent ref: group-v639972. 
{{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 658.937810] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38292e8e-1bd8-4430-b649-4c6ffb23c35a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.949400] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Created folder: Instances in parent group-v639972. [ 658.949514] env[68040]: DEBUG oslo.service.loopingcall [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 658.949808] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 658.949907] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da79f37e-9f5f-4094-8166-2cc159a065af {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.976923] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 658.976923] env[68040]: value = "task-3200158" [ 658.976923] env[68040]: _type = "Task" [ 658.976923] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.988600] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200158, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.992279] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 658.992616] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 658.992812] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 658.992930] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 659.019389] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Skipping network cache update for instance because it is Building. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 659.019939] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 659.019939] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 659.019939] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 659.019939] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 659.020348] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 659.020561] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 659.020689] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 659.021215] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 659.021460] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 659.021745] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 659.022020] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 659.022506] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 659.022506] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 659.022620] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 659.022767] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 659.040183] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.040183] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.040183] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.040183] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 659.041080] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd758da-9451-4b19-86cb-a3a425d992bb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.051178] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed78a101-5c50-4e3d-8ef9-cb465d09d6e1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.071607] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b832dfe-0efa-4121-84fc-e2bb8ab7c872 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.080331] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cd6475d-7f83-45c6-9e4b-ef464e158a76 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.115059] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181009MB free_disk=126GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 659.115278] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.115542] 
env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.121517] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquiring lock "97b050ff-2997-4504-8787-04f1221251b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.121517] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Lock "97b050ff-2997-4504-8787-04f1221251b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.140128] env[68040]: DEBUG nova.compute.manager [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 659.235034] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 467ffaac-0414-4bed-af2c-d0939d90ba79 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.235277] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 7374c1a5-1b4f-4026-b885-bf0eb12a850e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.236163] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4cc61343-486f-466c-9881-1a6856c82748 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.236610] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8ae39d32-abb3-4e3e-8d2d-003eda60b136 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.236693] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f23e3529-19a6-4562-ae9b-591d1a452385 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.236800] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.236925] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3411cba3-71c9-4334-bc79-4e322f4231f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 659.239151] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.268872] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 97b050ff-2997-4504-8787-04f1221251b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 659.269109] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 659.269255] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 659.420050] env[68040]: DEBUG nova.compute.manager [req-4e520af1-537a-4f31-b089-48086b0cee37 req-8b233978-e2c2-4c1b-9748-1f1b32d7bd67 service nova] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Received event network-vif-plugged-63eb2087-bb53-4d97-bceb-bfee13ccc78c {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 659.420098] env[68040]: DEBUG oslo_concurrency.lockutils [req-4e520af1-537a-4f31-b089-48086b0cee37 req-8b233978-e2c2-4c1b-9748-1f1b32d7bd67 service nova] Acquiring lock "4cc61343-486f-466c-9881-1a6856c82748-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.420783] env[68040]: DEBUG oslo_concurrency.lockutils [req-4e520af1-537a-4f31-b089-48086b0cee37 req-8b233978-e2c2-4c1b-9748-1f1b32d7bd67 service nova] Lock "4cc61343-486f-466c-9881-1a6856c82748-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s 
{{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.420783] env[68040]: DEBUG oslo_concurrency.lockutils [req-4e520af1-537a-4f31-b089-48086b0cee37 req-8b233978-e2c2-4c1b-9748-1f1b32d7bd67 service nova] Lock "4cc61343-486f-466c-9881-1a6856c82748-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.420783] env[68040]: DEBUG nova.compute.manager [req-4e520af1-537a-4f31-b089-48086b0cee37 req-8b233978-e2c2-4c1b-9748-1f1b32d7bd67 service nova] [instance: 4cc61343-486f-466c-9881-1a6856c82748] No waiting events found dispatching network-vif-plugged-63eb2087-bb53-4d97-bceb-bfee13ccc78c {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 659.420873] env[68040]: WARNING nova.compute.manager [req-4e520af1-537a-4f31-b089-48086b0cee37 req-8b233978-e2c2-4c1b-9748-1f1b32d7bd67 service nova] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Received unexpected event network-vif-plugged-63eb2087-bb53-4d97-bceb-bfee13ccc78c for instance with vm_state building and task_state spawning. [ 659.422860] env[68040]: DEBUG nova.compute.manager [req-4e520af1-537a-4f31-b089-48086b0cee37 req-8b233978-e2c2-4c1b-9748-1f1b32d7bd67 service nova] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Received event network-changed-63eb2087-bb53-4d97-bceb-bfee13ccc78c {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 659.422860] env[68040]: DEBUG nova.compute.manager [req-4e520af1-537a-4f31-b089-48086b0cee37 req-8b233978-e2c2-4c1b-9748-1f1b32d7bd67 service nova] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Refreshing instance network info cache due to event network-changed-63eb2087-bb53-4d97-bceb-bfee13ccc78c. 
{{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 659.422860] env[68040]: DEBUG oslo_concurrency.lockutils [req-4e520af1-537a-4f31-b089-48086b0cee37 req-8b233978-e2c2-4c1b-9748-1f1b32d7bd67 service nova] Acquiring lock "refresh_cache-4cc61343-486f-466c-9881-1a6856c82748" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.422860] env[68040]: DEBUG oslo_concurrency.lockutils [req-4e520af1-537a-4f31-b089-48086b0cee37 req-8b233978-e2c2-4c1b-9748-1f1b32d7bd67 service nova] Acquired lock "refresh_cache-4cc61343-486f-466c-9881-1a6856c82748" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.422860] env[68040]: DEBUG nova.network.neutron [req-4e520af1-537a-4f31-b089-48086b0cee37 req-8b233978-e2c2-4c1b-9748-1f1b32d7bd67 service nova] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Refreshing network info cache for port 63eb2087-bb53-4d97-bceb-bfee13ccc78c {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 659.471790] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cbfadc9-0cf5-4462-82dc-e142756969ac {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.485575] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1c605d-82b0-4e1c-be46-756965f4baaf {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.498588] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200158, 'name': CreateVM_Task, 'duration_secs': 0.472451} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.535203] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 659.536556] env[68040]: DEBUG oslo_concurrency.lockutils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.536647] env[68040]: DEBUG oslo_concurrency.lockutils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.536992] env[68040]: DEBUG oslo_concurrency.lockutils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 659.541025] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e97acd8-700b-41bd-aa85-ea591c97052b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.541025] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8811211-651e-46e9-9770-81120adb970f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.554474] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9989803d-e3f0-4efc-9092-b64d8e0514f2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.559312] env[68040]: DEBUG oslo_vmware.api [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Waiting for the task: (returnval){ [ 659.559312] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5280f2b2-2df9-9877-1f28-4d928e9f3ef7" [ 659.559312] env[68040]: _type = "Task" [ 659.559312] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.573023] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.580957] env[68040]: DEBUG oslo_concurrency.lockutils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 659.580957] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 659.580957] env[68040]: DEBUG oslo_concurrency.lockutils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.590468] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 659.616494] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 659.617781] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.501s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.617781] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.378s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.619514] env[68040]: INFO nova.compute.claims [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 
tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 659.884121] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9465f1b8-7831-42a6-a98c-03058b41ec37 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.891847] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8d89cc-0738-4dc9-912c-bba4d69556d3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.926544] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb002756-71ee-48f0-b6e9-a5843353e3a6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.935910] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7578dcb2-1cb5-40ef-8593-5a489693629a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.953369] env[68040]: DEBUG nova.compute.provider_tree [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.974929] env[68040]: DEBUG nova.scheduler.client.report [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 659.998358] env[68040]: DEBUG nova.network.neutron [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Updated VIF entry in instance network info cache for port ff42878f-c30e-4cd9-af66-1ac3a9ad0f6e. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 659.998512] env[68040]: DEBUG nova.network.neutron [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Updating instance_info_cache with network_info: [{"id": "ff42878f-c30e-4cd9-af66-1ac3a9ad0f6e", "address": "fa:16:3e:7a:6e:d1", "network": {"id": "88187e54-b99b-4328-bc20-35f889cb52f5", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-402678145-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bba47d6853164b709160fe935e827841", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "471f65a5-21ea-45e3-a722-4e204ed65673", "external-id": "nsx-vlan-transportzone-139", "segmentation_id": 139, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff42878f-c3", "ovs_interfaceid": "ff42878f-c30e-4cd9-af66-1ac3a9ad0f6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.009317] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.392s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.009791] env[68040]: DEBUG nova.compute.manager [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Start building networks asynchronously for instance. 
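The instance_info_cache payload logged above is a list of VIF dicts (port id, MAC, network, subnets, fixed IPs, OVS details). A short sketch of reading the fixed IPs and device name out of one entry; the dict literal is abridged from the log, and this is plain dict access, not a Nova API:

    # Abridged from the network_info blob logged above.
    network_info = [{
        "id": "ff42878f-c30e-4cd9-af66-1ac3a9ad0f6e",
        "address": "fa:16:3e:7a:6e:d1",
        "devname": "tapff42878f-c3",
        "network": {"subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.7", "type": "fixed"}],
        }]},
    }]

    for vif in network_info:
        fixed = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
        print(vif["devname"], vif["address"], fixed)
    # tapff42878f-c3 fa:16:3e:7a:6e:d1 ['192.168.128.7']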
{{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 660.018458] env[68040]: DEBUG oslo_concurrency.lockutils [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] Releasing lock "refresh_cache-7374c1a5-1b4f-4026-b885-bf0eb12a850e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.018743] env[68040]: DEBUG nova.compute.manager [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Received event network-vif-plugged-57121e1d-ce6b-4afe-b723-0d0e8e6f6051 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 660.018890] env[68040]: DEBUG oslo_concurrency.lockutils [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] Acquiring lock "f23e3529-19a6-4562-ae9b-591d1a452385-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.020076] env[68040]: DEBUG oslo_concurrency.lockutils [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] Lock "f23e3529-19a6-4562-ae9b-591d1a452385-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.020076] env[68040]: DEBUG oslo_concurrency.lockutils [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] Lock "f23e3529-19a6-4562-ae9b-591d1a452385-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.020312] env[68040]: DEBUG nova.compute.manager [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] No waiting events found dispatching network-vif-plugged-57121e1d-ce6b-4afe-b723-0d0e8e6f6051 {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 660.020413] env[68040]: WARNING nova.compute.manager [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Received unexpected event network-vif-plugged-57121e1d-ce6b-4afe-b723-0d0e8e6f6051 for instance with vm_state building and task_state spawning. [ 660.020576] env[68040]: DEBUG nova.compute.manager [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Received event network-changed-57121e1d-ce6b-4afe-b723-0d0e8e6f6051 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 660.020730] env[68040]: DEBUG nova.compute.manager [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Refreshing instance network info cache due to event network-changed-57121e1d-ce6b-4afe-b723-0d0e8e6f6051. 
{{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 660.020918] env[68040]: DEBUG oslo_concurrency.lockutils [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] Acquiring lock "refresh_cache-f23e3529-19a6-4562-ae9b-591d1a452385" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 660.021507] env[68040]: DEBUG oslo_concurrency.lockutils [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] Acquired lock "refresh_cache-f23e3529-19a6-4562-ae9b-591d1a452385" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.021507] env[68040]: DEBUG nova.network.neutron [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Refreshing network info cache for port 57121e1d-ce6b-4afe-b723-0d0e8e6f6051 {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 660.060995] env[68040]: DEBUG nova.compute.utils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 660.066151] env[68040]: DEBUG nova.compute.manager [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 660.066151] env[68040]: DEBUG nova.network.neutron [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 660.078739] env[68040]: DEBUG nova.compute.manager [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 660.177707] env[68040]: DEBUG nova.compute.manager [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Start spawning the instance on the hypervisor. 
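The Acquiring/Acquired/Releasing triples for lock "refresh_cache-<uuid>" above are oslo_concurrency named locks serializing cache refreshes per instance; "waited" is time blocked on the lock and "held" is time spent inside it. A minimal sketch of the same pattern using the real lockutils.lock context manager (refresh_fn is a placeholder for the actual cache-refresh work):

    from oslo_concurrency import lockutils

    def refresh_network_cache(instance_uuid, refresh_fn):
        # Concurrent callers for the same UUID block here; that blocking time
        # is what the "waited N.NNNs" figures in the log measure.
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            return refresh_fn(instance_uuid)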
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 660.203037] env[68040]: DEBUG nova.virt.hardware [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 660.203037] env[68040]: DEBUG nova.virt.hardware [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 660.203037] env[68040]: DEBUG nova.virt.hardware [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 660.203244] env[68040]: DEBUG nova.virt.hardware [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 660.203244] env[68040]: DEBUG nova.virt.hardware [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 660.203244] env[68040]: DEBUG nova.virt.hardware [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 660.203355] env[68040]: DEBUG nova.virt.hardware [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 660.203512] env[68040]: DEBUG nova.virt.hardware [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 660.203674] env[68040]: DEBUG nova.virt.hardware [None 
req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 660.203857] env[68040]: DEBUG nova.virt.hardware [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 660.204252] env[68040]: DEBUG nova.virt.hardware [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 660.205149] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb275c8-5fcf-48f0-a08a-07496e4346f2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.213886] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20758a2a-42b4-4ae8-9362-eb2c08319a0b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.328105] env[68040]: DEBUG nova.policy [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '697d3700fa134bc1af625faea8349c1c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dd6e3befaffa492c8eb487ac87c42785', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 660.989815] env[68040]: DEBUG nova.network.neutron [req-4e520af1-537a-4f31-b089-48086b0cee37 req-8b233978-e2c2-4c1b-9748-1f1b32d7bd67 service nova] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Updated VIF entry in instance network info cache for port 63eb2087-bb53-4d97-bceb-bfee13ccc78c. 
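The nova.virt.hardware trace above (flavor and image limits of 0:0:0 falling back to 65536:65536:65536, then exactly one topology for one vCPU) enumerates the sockets*cores*threads factorizations of the vCPU count. A simplified sketch of that enumeration; it mirrors the idea, not Nova's actual _get_possible_cpu_topologies:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """List every (sockets, cores, threads) whose product equals vcpus."""
        return [(s, c, t)
                for s in range(1, min(vcpus, max_sockets) + 1)
                for c in range(1, min(vcpus, max_cores) + 1)
                for t in range(1, min(vcpus, max_threads) + 1)
                if s * c * t == vcpus]

    print(possible_topologies(1))  # [(1, 1, 1)] -> "Got 1 possible topologies"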
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 660.990134] env[68040]: DEBUG nova.network.neutron [req-4e520af1-537a-4f31-b089-48086b0cee37 req-8b233978-e2c2-4c1b-9748-1f1b32d7bd67 service nova] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Updating instance_info_cache with network_info: [{"id": "63eb2087-bb53-4d97-bceb-bfee13ccc78c", "address": "fa:16:3e:9a:d6:09", "network": {"id": "ca1a84af-ab33-497c-8767-fd4463c076be", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.230", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0770d674a39c40089de0aade9440b370", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63eb2087-bb", "ovs_interfaceid": "63eb2087-bb53-4d97-bceb-bfee13ccc78c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.005654] env[68040]: DEBUG oslo_concurrency.lockutils [req-4e520af1-537a-4f31-b089-48086b0cee37 req-8b233978-e2c2-4c1b-9748-1f1b32d7bd67 service nova] Releasing lock "refresh_cache-4cc61343-486f-466c-9881-1a6856c82748" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.156974] env[68040]: DEBUG nova.network.neutron [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Updated VIF entry in instance network info cache for port 57121e1d-ce6b-4afe-b723-0d0e8e6f6051. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 661.156974] env[68040]: DEBUG nova.network.neutron [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Updating instance_info_cache with network_info: [{"id": "57121e1d-ce6b-4afe-b723-0d0e8e6f6051", "address": "fa:16:3e:02:f3:87", "network": {"id": "75323b07-43f8-4219-ae49-9d83d10b9ae8", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-971332642-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7c339e0a904a43e99c5fe8bb6da01be1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d054505-89d3-49c5-8b38-5da917a42c49", "external-id": "nsx-vlan-transportzone-888", "segmentation_id": 888, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57121e1d-ce", "ovs_interfaceid": "57121e1d-ce6b-4afe-b723-0d0e8e6f6051", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.171814] env[68040]: DEBUG oslo_concurrency.lockutils [req-f03329bf-321d-413b-a843-525a7704c592 req-54229e54-784b-4038-8f21-1d00e9a3cfd3 service nova] Releasing lock "refresh_cache-f23e3529-19a6-4562-ae9b-591d1a452385" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.348687] env[68040]: DEBUG nova.network.neutron [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Successfully created port: a538d604-9220-441c-a9c9-4fde99e4da79 {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 661.547636] env[68040]: DEBUG nova.network.neutron [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Successfully updated port: 45a2eb6d-a522-4966-a710-81ffcaf1c461 {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 661.586104] env[68040]: DEBUG oslo_concurrency.lockutils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquiring lock "refresh_cache-3411cba3-71c9-4334-bc79-4e322f4231f1" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.586104] env[68040]: DEBUG oslo_concurrency.lockutils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquired lock "refresh_cache-3411cba3-71c9-4334-bc79-4e322f4231f1" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.586104] env[68040]: DEBUG nova.network.neutron [None 
req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 661.660384] env[68040]: DEBUG oslo_concurrency.lockutils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Acquiring lock "b81d413c-2449-471a-b3d9-693fc0ab2824" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.662380] env[68040]: DEBUG oslo_concurrency.lockutils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Lock "b81d413c-2449-471a-b3d9-693fc0ab2824" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.676672] env[68040]: DEBUG nova.compute.manager [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 661.758109] env[68040]: DEBUG oslo_concurrency.lockutils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.758765] env[68040]: DEBUG oslo_concurrency.lockutils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.762052] env[68040]: INFO nova.compute.claims [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 661.926948] env[68040]: DEBUG nova.network.neutron [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Instance cache missing network info. 
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 661.982090] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e8ba7e-7198-4ca3-bc86-32f743f16052 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.991356] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-707c254e-c9eb-40fd-97e3-8029e29b2387 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.025947] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e630d5cd-beb7-4c6f-85d1-b044ddf03d34 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.033226] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f5e1b7b-67a6-422c-a0b8-b7461fae5ba9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.046840] env[68040]: DEBUG nova.compute.provider_tree [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 662.063504] env[68040]: DEBUG nova.scheduler.client.report [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 662.085033] env[68040]: DEBUG oslo_concurrency.lockutils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.326s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.085350] env[68040]: DEBUG nova.compute.manager [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Start building networks asynchronously for instance. 
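The inventory dict repeated in these scheduler report lines feeds placement, where usable capacity per resource class is (total - reserved) * allocation_ratio. A quick arithmetic check against the numbers logged for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }
    for rc, inv in inventory.items():
        print(rc, (inv["total"] - inv["reserved"]) * inv["allocation_ratio"])
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0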
{{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 662.123745] env[68040]: DEBUG nova.compute.utils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 662.125396] env[68040]: DEBUG nova.compute.manager [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 662.125598] env[68040]: DEBUG nova.network.neutron [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 662.138660] env[68040]: DEBUG nova.compute.manager [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 662.225237] env[68040]: DEBUG nova.compute.manager [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Start spawning the instance on the hypervisor. 
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 662.255491] env[68040]: DEBUG nova.virt.hardware [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 662.255491] env[68040]: DEBUG nova.virt.hardware [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 662.255723] env[68040]: DEBUG nova.virt.hardware [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.255723] env[68040]: DEBUG nova.virt.hardware [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 662.255834] env[68040]: DEBUG nova.virt.hardware [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 662.255986] env[68040]: DEBUG nova.virt.hardware [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 662.256438] env[68040]: DEBUG nova.virt.hardware [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 662.256823] env[68040]: DEBUG nova.virt.hardware [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 662.256823] env[68040]: DEBUG nova.virt.hardware [None 
req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 662.256942] env[68040]: DEBUG nova.virt.hardware [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 662.257342] env[68040]: DEBUG nova.virt.hardware [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 662.258457] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b51aa34-5f0c-4bda-847c-74a9d83f2ea7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.268507] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb72b79d-7772-4b0a-abce-98a1a6857f6a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.433311] env[68040]: DEBUG nova.policy [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'febf1fb3bcda4bf0953ecd17adba56ab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '08258560746e46db9b856799c3743168', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 662.669020] env[68040]: DEBUG nova.network.neutron [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Updating instance_info_cache with network_info: [{"id": "45a2eb6d-a522-4966-a710-81ffcaf1c461", "address": "fa:16:3e:fb:00:45", "network": {"id": "a756dafe-b794-4ec3-8dc3-4d5d5fdb3ddf", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-659578877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd6e3befaffa492c8eb487ac87c42785", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45a2eb6d-a5", "ovs_interfaceid": "45a2eb6d-a522-4966-a710-81ffcaf1c461", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.691919] env[68040]: DEBUG oslo_concurrency.lockutils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Releasing lock "refresh_cache-3411cba3-71c9-4334-bc79-4e322f4231f1" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 662.692273] env[68040]: DEBUG nova.compute.manager [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Instance network_info: |[{"id": "45a2eb6d-a522-4966-a710-81ffcaf1c461", "address": "fa:16:3e:fb:00:45", "network": {"id": "a756dafe-b794-4ec3-8dc3-4d5d5fdb3ddf", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-659578877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd6e3befaffa492c8eb487ac87c42785", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45a2eb6d-a5", "ovs_interfaceid": "45a2eb6d-a522-4966-a710-81ffcaf1c461", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 662.693239] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:00:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31ac3fea-ebf4-4bed-bf70-1eaecdf71280', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '45a2eb6d-a522-4966-a710-81ffcaf1c461', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 662.702969] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Creating folder: Project (dd6e3befaffa492c8eb487ac87c42785). Parent ref: group-v639956. 
{{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 662.703594] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b6bccf2c-1b23-4516-ba84-027df87a3c6c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.714962] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Created folder: Project (dd6e3befaffa492c8eb487ac87c42785) in parent group-v639956. [ 662.715171] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Creating folder: Instances. Parent ref: group-v639975. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 662.715410] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1f417526-13eb-450c-af7d-d89d9fcf7c70 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.726876] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Created folder: Instances in parent group-v639975. [ 662.727219] env[68040]: DEBUG oslo.service.loopingcall [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 662.727442] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 662.727622] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-368b2adf-7c43-4e68-a384-6a1dfef5a136 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.752194] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 662.752194] env[68040]: value = "task-3200161" [ 662.752194] env[68040]: _type = "Task" [ 662.752194] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.761158] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200161, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.265795] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200161, 'name': CreateVM_Task, 'duration_secs': 0.349655} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.266260] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 663.266642] env[68040]: DEBUG oslo_concurrency.lockutils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.266814] env[68040]: DEBUG oslo_concurrency.lockutils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.267238] env[68040]: DEBUG oslo_concurrency.lockutils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 663.267496] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e6e227a-be04-4036-bd0a-07be95963f5c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.274033] env[68040]: DEBUG oslo_vmware.api [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Waiting for the task: (returnval){ [ 663.274033] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52acc4ea-1f23-ed5e-6fc2-76206586da1d" [ 663.274033] env[68040]: _type = "Task" [ 663.274033] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.281702] env[68040]: DEBUG oslo_vmware.api [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52acc4ea-1f23-ed5e-6fc2-76206586da1d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.320742] env[68040]: DEBUG nova.network.neutron [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Successfully updated port: a538d604-9220-441c-a9c9-4fde99e4da79 {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 663.331897] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquiring lock "refresh_cache-97b050ff-2997-4504-8787-04f1221251b8" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.332067] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquired lock "refresh_cache-97b050ff-2997-4504-8787-04f1221251b8" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.332351] env[68040]: DEBUG nova.network.neutron [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 663.510174] env[68040]: DEBUG nova.network.neutron [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Instance cache missing network info. 
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 663.646073] env[68040]: DEBUG nova.compute.manager [req-1799ce33-399f-4368-ad80-e745bbec27fe req-0972e184-1109-42ca-8238-34e96370eac1 service nova] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Received event network-vif-plugged-669990b8-9fa9-4d05-bcd1-ec726196c14f {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 663.646180] env[68040]: DEBUG oslo_concurrency.lockutils [req-1799ce33-399f-4368-ad80-e745bbec27fe req-0972e184-1109-42ca-8238-34e96370eac1 service nova] Acquiring lock "17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.646389] env[68040]: DEBUG oslo_concurrency.lockutils [req-1799ce33-399f-4368-ad80-e745bbec27fe req-0972e184-1109-42ca-8238-34e96370eac1 service nova] Lock "17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.646549] env[68040]: DEBUG oslo_concurrency.lockutils [req-1799ce33-399f-4368-ad80-e745bbec27fe req-0972e184-1109-42ca-8238-34e96370eac1 service nova] Lock "17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.646751] env[68040]: DEBUG nova.compute.manager [req-1799ce33-399f-4368-ad80-e745bbec27fe req-0972e184-1109-42ca-8238-34e96370eac1 service nova] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] No waiting events found dispatching network-vif-plugged-669990b8-9fa9-4d05-bcd1-ec726196c14f {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 663.647165] env[68040]: WARNING nova.compute.manager [req-1799ce33-399f-4368-ad80-e745bbec27fe req-0972e184-1109-42ca-8238-34e96370eac1 service nova] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Received unexpected event network-vif-plugged-669990b8-9fa9-4d05-bcd1-ec726196c14f for instance with vm_state building and task_state spawning. [ 663.647216] env[68040]: DEBUG nova.compute.manager [req-1799ce33-399f-4368-ad80-e745bbec27fe req-0972e184-1109-42ca-8238-34e96370eac1 service nova] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Received event network-changed-669990b8-9fa9-4d05-bcd1-ec726196c14f {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 663.647566] env[68040]: DEBUG nova.compute.manager [req-1799ce33-399f-4368-ad80-e745bbec27fe req-0972e184-1109-42ca-8238-34e96370eac1 service nova] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Refreshing instance network info cache due to event network-changed-669990b8-9fa9-4d05-bcd1-ec726196c14f. 
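The "No waiting events found dispatching network-vif-plugged-..." warnings above reflect an event handshake: an external Neutron event is consumed only if a spawn thread registered a waiter for it first, otherwise it is logged as unexpected. A hypothetical, much-simplified sketch of such a registry (not Nova's InstanceEvents implementation):

    import threading

    class InstanceEventRegistry:
        def __init__(self):
            self._waiters = {}          # (instance_uuid, event_name) -> Event
            self._lock = threading.Lock()

        def prepare(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:            # cf. the "<uuid>-events" lock lines above
                self._waiters[(instance_uuid, event_name)] = ev
            return ev                   # the spawn thread later calls ev.wait()

        def pop(self, instance_uuid, event_name):
            with self._lock:
                ev = self._waiters.pop((instance_uuid, event_name), None)
            if ev is None:              # cf. the "Received unexpected event" warning
                return False
            ev.set()                    # wakes the waiting spawn thread
            return True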
{{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 663.647952] env[68040]: DEBUG oslo_concurrency.lockutils [req-1799ce33-399f-4368-ad80-e745bbec27fe req-0972e184-1109-42ca-8238-34e96370eac1 service nova] Acquiring lock "refresh_cache-17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.651549] env[68040]: DEBUG oslo_concurrency.lockutils [req-1799ce33-399f-4368-ad80-e745bbec27fe req-0972e184-1109-42ca-8238-34e96370eac1 service nova] Acquired lock "refresh_cache-17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.652141] env[68040]: DEBUG nova.network.neutron [req-1799ce33-399f-4368-ad80-e745bbec27fe req-0972e184-1109-42ca-8238-34e96370eac1 service nova] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Refreshing network info cache for port 669990b8-9fa9-4d05-bcd1-ec726196c14f {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 663.786906] env[68040]: DEBUG oslo_concurrency.lockutils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.786906] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 663.786906] env[68040]: DEBUG oslo_concurrency.lockutils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 663.929039] env[68040]: DEBUG nova.network.neutron [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Successfully created port: 1b25545b-a909-4dc7-84fd-75829b9051d7 {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 663.950231] env[68040]: DEBUG oslo_concurrency.lockutils [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Acquiring lock "39de4e78-44cd-4582-998e-88ce6de2d51c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 663.950526] env[68040]: DEBUG oslo_concurrency.lockutils [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Lock "39de4e78-44cd-4582-998e-88ce6de2d51c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 
0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 663.962907] env[68040]: DEBUG nova.compute.manager [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 664.038529] env[68040]: DEBUG oslo_concurrency.lockutils [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.039361] env[68040]: DEBUG oslo_concurrency.lockutils [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 664.042052] env[68040]: INFO nova.compute.claims [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 664.274391] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-728f1221-2e07-495f-8343-5cc64d0bff48 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.287318] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942d802a-b538-4235-853a-4dba8e05e8f5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.327306] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6394dd40-2dee-4c6f-a346-60e96450ad37 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.336233] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c54ae4e-c874-4a9e-9351-275fe20199ff {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.351930] env[68040]: DEBUG nova.compute.provider_tree [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 664.367502] env[68040]: DEBUG nova.scheduler.client.report [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 664.386244] env[68040]: DEBUG oslo_concurrency.lockutils [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.347s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 664.387012] env[68040]: DEBUG nova.compute.manager [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 664.446895] env[68040]: DEBUG nova.compute.utils [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 664.450139] env[68040]: DEBUG nova.compute.manager [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 664.450139] env[68040]: DEBUG nova.network.neutron [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 664.465455] env[68040]: DEBUG nova.compute.manager [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Start building block device mappings for instance. 
{{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 664.469828] env[68040]: DEBUG nova.network.neutron [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Updating instance_info_cache with network_info: [{"id": "a538d604-9220-441c-a9c9-4fde99e4da79", "address": "fa:16:3e:c5:24:4d", "network": {"id": "a756dafe-b794-4ec3-8dc3-4d5d5fdb3ddf", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-659578877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd6e3befaffa492c8eb487ac87c42785", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa538d604-92", "ovs_interfaceid": "a538d604-9220-441c-a9c9-4fde99e4da79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.493553] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Releasing lock "refresh_cache-97b050ff-2997-4504-8787-04f1221251b8" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 664.493553] env[68040]: DEBUG nova.compute.manager [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Instance network_info: |[{"id": "a538d604-9220-441c-a9c9-4fde99e4da79", "address": "fa:16:3e:c5:24:4d", "network": {"id": "a756dafe-b794-4ec3-8dc3-4d5d5fdb3ddf", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-659578877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd6e3befaffa492c8eb487ac87c42785", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa538d604-92", "ovs_interfaceid": "a538d604-9220-441c-a9c9-4fde99e4da79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 664.493680] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:24:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31ac3fea-ebf4-4bed-bf70-1eaecdf71280', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a538d604-9220-441c-a9c9-4fde99e4da79', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 664.503705] env[68040]: DEBUG oslo.service.loopingcall [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 664.504583] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 664.506018] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6826ad2c-bf1e-405c-a2eb-9cf8b251a428 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.533653] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 664.533653] env[68040]: value = "task-3200162" [ 664.533653] env[68040]: _type = "Task" [ 664.533653] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.542832] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200162, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.565472] env[68040]: DEBUG nova.compute.manager [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Start spawning the instance on the hypervisor. 
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 664.598523] env[68040]: DEBUG nova.virt.hardware [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 664.598774] env[68040]: DEBUG nova.virt.hardware [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 664.598959] env[68040]: DEBUG nova.virt.hardware [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 664.599212] env[68040]: DEBUG nova.virt.hardware [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 664.599381] env[68040]: DEBUG nova.virt.hardware [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 664.599540] env[68040]: DEBUG nova.virt.hardware [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 664.599976] env[68040]: DEBUG nova.virt.hardware [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 664.600180] env[68040]: DEBUG nova.virt.hardware [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 664.600370] env[68040]: DEBUG nova.virt.hardware [None 
req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 664.600642] env[68040]: DEBUG nova.virt.hardware [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 664.600860] env[68040]: DEBUG nova.virt.hardware [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 664.602221] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa7b0d3c-6f60-41ff-8ce3-72d34a52aedb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.610610] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d98c0e-6d00-4c5a-8f77-56e0fde71f4c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.868397] env[68040]: DEBUG nova.policy [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f61b0b9c7681417bb77230b5bd2edd1c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86b4fcc6c2634ee8acdb7a4b5fd129ed', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 665.046826] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200162, 'name': CreateVM_Task, 'duration_secs': 0.328099} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.047245] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 665.048365] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.049939] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.049939] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 665.049939] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21c1bdc9-4112-446c-ba0d-84431deb67aa {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.054800] env[68040]: DEBUG oslo_vmware.api [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Waiting for the task: (returnval){ [ 665.054800] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5210def2-6c4e-5d0d-e336-e1d1286e156d" [ 665.054800] env[68040]: _type = "Task" [ 665.054800] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.066429] env[68040]: DEBUG oslo_vmware.api [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5210def2-6c4e-5d0d-e336-e1d1286e156d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.225036] env[68040]: DEBUG nova.network.neutron [req-1799ce33-399f-4368-ad80-e745bbec27fe req-0972e184-1109-42ca-8238-34e96370eac1 service nova] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Updated VIF entry in instance network info cache for port 669990b8-9fa9-4d05-bcd1-ec726196c14f. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 665.225036] env[68040]: DEBUG nova.network.neutron [req-1799ce33-399f-4368-ad80-e745bbec27fe req-0972e184-1109-42ca-8238-34e96370eac1 service nova] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Updating instance_info_cache with network_info: [{"id": "669990b8-9fa9-4d05-bcd1-ec726196c14f", "address": "fa:16:3e:93:51:82", "network": {"id": "ca1a84af-ab33-497c-8767-fd4463c076be", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.87", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0770d674a39c40089de0aade9440b370", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap669990b8-9f", "ovs_interfaceid": "669990b8-9fa9-4d05-bcd1-ec726196c14f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.243801] env[68040]: DEBUG oslo_concurrency.lockutils [req-1799ce33-399f-4368-ad80-e745bbec27fe req-0972e184-1109-42ca-8238-34e96370eac1 service nova] Releasing lock "refresh_cache-17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.244099] env[68040]: DEBUG nova.compute.manager [req-1799ce33-399f-4368-ad80-e745bbec27fe req-0972e184-1109-42ca-8238-34e96370eac1 service nova] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Received event network-vif-plugged-45a2eb6d-a522-4966-a710-81ffcaf1c461 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 665.244451] env[68040]: DEBUG oslo_concurrency.lockutils [req-1799ce33-399f-4368-ad80-e745bbec27fe req-0972e184-1109-42ca-8238-34e96370eac1 service nova] Acquiring lock "3411cba3-71c9-4334-bc79-4e322f4231f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.244537] env[68040]: DEBUG oslo_concurrency.lockutils [req-1799ce33-399f-4368-ad80-e745bbec27fe req-0972e184-1109-42ca-8238-34e96370eac1 service nova] Lock "3411cba3-71c9-4334-bc79-4e322f4231f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.244635] env[68040]: DEBUG oslo_concurrency.lockutils [req-1799ce33-399f-4368-ad80-e745bbec27fe req-0972e184-1109-42ca-8238-34e96370eac1 service nova] Lock "3411cba3-71c9-4334-bc79-4e322f4231f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.244791] env[68040]: DEBUG nova.compute.manager 
[req-1799ce33-399f-4368-ad80-e745bbec27fe req-0972e184-1109-42ca-8238-34e96370eac1 service nova] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] No waiting events found dispatching network-vif-plugged-45a2eb6d-a522-4966-a710-81ffcaf1c461 {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 665.244949] env[68040]: WARNING nova.compute.manager [req-1799ce33-399f-4368-ad80-e745bbec27fe req-0972e184-1109-42ca-8238-34e96370eac1 service nova] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Received unexpected event network-vif-plugged-45a2eb6d-a522-4966-a710-81ffcaf1c461 for instance with vm_state building and task_state spawning. [ 665.312334] env[68040]: DEBUG nova.compute.manager [req-24059ca9-6fab-4f26-82ac-95afa46f2955 req-493153f1-1d2d-42a9-b29e-b776a38e170c service nova] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Received event network-vif-plugged-a538d604-9220-441c-a9c9-4fde99e4da79 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 665.312578] env[68040]: DEBUG oslo_concurrency.lockutils [req-24059ca9-6fab-4f26-82ac-95afa46f2955 req-493153f1-1d2d-42a9-b29e-b776a38e170c service nova] Acquiring lock "97b050ff-2997-4504-8787-04f1221251b8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 665.313201] env[68040]: DEBUG oslo_concurrency.lockutils [req-24059ca9-6fab-4f26-82ac-95afa46f2955 req-493153f1-1d2d-42a9-b29e-b776a38e170c service nova] Lock "97b050ff-2997-4504-8787-04f1221251b8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 665.313201] env[68040]: DEBUG oslo_concurrency.lockutils [req-24059ca9-6fab-4f26-82ac-95afa46f2955 req-493153f1-1d2d-42a9-b29e-b776a38e170c service nova] Lock "97b050ff-2997-4504-8787-04f1221251b8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.313798] env[68040]: DEBUG nova.compute.manager [req-24059ca9-6fab-4f26-82ac-95afa46f2955 req-493153f1-1d2d-42a9-b29e-b776a38e170c service nova] [instance: 97b050ff-2997-4504-8787-04f1221251b8] No waiting events found dispatching network-vif-plugged-a538d604-9220-441c-a9c9-4fde99e4da79 {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 665.314078] env[68040]: WARNING nova.compute.manager [req-24059ca9-6fab-4f26-82ac-95afa46f2955 req-493153f1-1d2d-42a9-b29e-b776a38e170c service nova] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Received unexpected event network-vif-plugged-a538d604-9220-441c-a9c9-4fde99e4da79 for instance with vm_state building and task_state spawning. 
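Note: the recurring "Acquiring lock ... by ..." / "acquired ... waited" / ""released" ... held" DEBUG triplets throughout this log (lockutils.py:402/407/421 in the records above) are emitted by oslo.concurrency's lockutils wrapper around a critical section. A minimal, hypothetical sketch of the pattern that produces them; the lock name and function below are placeholders, not Nova's actual code:

    # Hypothetical sketch of the oslo.concurrency locking pattern behind the
    # "Acquiring/acquired/released" DEBUG lines; 'instance-events' is a
    # placeholder lock name, not Nova's real one.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('instance-events')
    def pop_event():
        # While this body runs, competing callers log "Acquiring lock" and
        # block; lockutils logs the waited/held durations on entry and exit.
        return None

    # The same lock taken explicitly as a context manager:
    with lockutils.lock('instance-events'):
        pass

The "waited 0.000s" / "held 0.000s" figures in the records above are exactly these entry/exit measurements; contention on a lock shows up as a non-zero waited value.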
[ 665.579641] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.579641] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 665.579641] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.335035] env[68040]: DEBUG nova.network.neutron [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Successfully updated port: 1b25545b-a909-4dc7-84fd-75829b9051d7 {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 666.346896] env[68040]: DEBUG oslo_concurrency.lockutils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Acquiring lock "refresh_cache-b81d413c-2449-471a-b3d9-693fc0ab2824" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.347059] env[68040]: DEBUG oslo_concurrency.lockutils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Acquired lock "refresh_cache-b81d413c-2449-471a-b3d9-693fc0ab2824" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.347207] env[68040]: DEBUG nova.network.neutron [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 666.509408] env[68040]: DEBUG nova.network.neutron [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Instance cache missing network info. 
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 667.053576] env[68040]: DEBUG nova.network.neutron [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Successfully created port: 27ee9d32-b97c-4079-8df0-51ce652963bf {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 667.167589] env[68040]: DEBUG nova.network.neutron [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Updating instance_info_cache with network_info: [{"id": "1b25545b-a909-4dc7-84fd-75829b9051d7", "address": "fa:16:3e:d5:45:94", "network": {"id": "e31cf319-12d1-47bf-9f71-0cef4887d941", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-971583385-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08258560746e46db9b856799c3743168", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b25545b-a9", "ovs_interfaceid": "1b25545b-a909-4dc7-84fd-75829b9051d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.187833] env[68040]: DEBUG oslo_concurrency.lockutils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Releasing lock "refresh_cache-b81d413c-2449-471a-b3d9-693fc0ab2824" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.188371] env[68040]: DEBUG nova.compute.manager [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Instance network_info: |[{"id": "1b25545b-a909-4dc7-84fd-75829b9051d7", "address": "fa:16:3e:d5:45:94", "network": {"id": "e31cf319-12d1-47bf-9f71-0cef4887d941", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-971583385-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08258560746e46db9b856799c3743168", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": 
"nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b25545b-a9", "ovs_interfaceid": "1b25545b-a909-4dc7-84fd-75829b9051d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 667.188988] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:45:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '424fd631-4456-4ce2-8924-a2ed81d60bd6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1b25545b-a909-4dc7-84fd-75829b9051d7', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 667.201909] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Creating folder: Project (08258560746e46db9b856799c3743168). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 667.203823] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-975f2b66-9ee8-45a2-b80e-f29e64a751da {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.218879] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Created folder: Project (08258560746e46db9b856799c3743168) in parent group-v639956. [ 667.219112] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Creating folder: Instances. Parent ref: group-v639979. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 667.219355] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a9757b4-95c6-4cad-9f3c-0295c2fa1ef0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.229182] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Created folder: Instances in parent group-v639979. [ 667.229938] env[68040]: DEBUG oslo.service.loopingcall [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 667.230401] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 667.230924] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d4cd205-77c5-4927-90c6-d16a36b1cab9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.261187] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 667.261187] env[68040]: value = "task-3200165" [ 667.261187] env[68040]: _type = "Task" [ 667.261187] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.269240] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200165, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.768337] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200165, 'name': CreateVM_Task, 'duration_secs': 0.301733} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.768571] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 667.769393] env[68040]: DEBUG oslo_concurrency.lockutils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 667.769393] env[68040]: DEBUG oslo_concurrency.lockutils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.769859] env[68040]: DEBUG oslo_concurrency.lockutils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 667.769914] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45a39006-4c43-43e3-a886-b8b9b92f7275 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.775025] env[68040]: DEBUG oslo_vmware.api [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Waiting for the task: (returnval){ [ 667.775025] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5239327c-2c98-9e5f-9a18-19286c6c527b" [ 667.775025] env[68040]: _type = "Task" [ 667.775025] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.783052] env[68040]: DEBUG oslo_vmware.api [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5239327c-2c98-9e5f-9a18-19286c6c527b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.223772] env[68040]: DEBUG nova.compute.manager [req-8911af2b-02ab-4c52-b479-9eccd58e357a req-d5f6651a-f4fe-4ea7-b506-16195c6ac188 service nova] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Received event network-changed-45a2eb6d-a522-4966-a710-81ffcaf1c461 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 668.224029] env[68040]: DEBUG nova.compute.manager [req-8911af2b-02ab-4c52-b479-9eccd58e357a req-d5f6651a-f4fe-4ea7-b506-16195c6ac188 service nova] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Refreshing instance network info cache due to event network-changed-45a2eb6d-a522-4966-a710-81ffcaf1c461. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 668.224262] env[68040]: DEBUG oslo_concurrency.lockutils [req-8911af2b-02ab-4c52-b479-9eccd58e357a req-d5f6651a-f4fe-4ea7-b506-16195c6ac188 service nova] Acquiring lock "refresh_cache-3411cba3-71c9-4334-bc79-4e322f4231f1" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 668.224410] env[68040]: DEBUG oslo_concurrency.lockutils [req-8911af2b-02ab-4c52-b479-9eccd58e357a req-d5f6651a-f4fe-4ea7-b506-16195c6ac188 service nova] Acquired lock "refresh_cache-3411cba3-71c9-4334-bc79-4e322f4231f1" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.224572] env[68040]: DEBUG nova.network.neutron [req-8911af2b-02ab-4c52-b479-9eccd58e357a req-d5f6651a-f4fe-4ea7-b506-16195c6ac188 service nova] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Refreshing network info cache for port 45a2eb6d-a522-4966-a710-81ffcaf1c461 {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 668.292600] env[68040]: DEBUG oslo_concurrency.lockutils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 668.292683] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 668.292879] env[68040]: DEBUG oslo_concurrency.lockutils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 668.335621] 
env[68040]: DEBUG oslo_concurrency.lockutils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Acquiring lock "a89ff564-ea35-4000-8efa-2c1ec2b61759" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.335621] env[68040]: DEBUG oslo_concurrency.lockutils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Lock "a89ff564-ea35-4000-8efa-2c1ec2b61759" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.908022] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Acquiring lock "42f39352-e703-4ebf-9559-4c8b5abca70e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.908330] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Lock "42f39352-e703-4ebf-9559-4c8b5abca70e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.994074] env[68040]: DEBUG oslo_concurrency.lockutils [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Acquiring lock "d1819f29-a891-47dd-a456-8f3b127daf6f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 668.994391] env[68040]: DEBUG oslo_concurrency.lockutils [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Lock "d1819f29-a891-47dd-a456-8f3b127daf6f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.420480] env[68040]: DEBUG nova.network.neutron [req-8911af2b-02ab-4c52-b479-9eccd58e357a req-d5f6651a-f4fe-4ea7-b506-16195c6ac188 service nova] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Updated VIF entry in instance network info cache for port 45a2eb6d-a522-4966-a710-81ffcaf1c461. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 669.420480] env[68040]: DEBUG nova.network.neutron [req-8911af2b-02ab-4c52-b479-9eccd58e357a req-d5f6651a-f4fe-4ea7-b506-16195c6ac188 service nova] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Updating instance_info_cache with network_info: [{"id": "45a2eb6d-a522-4966-a710-81ffcaf1c461", "address": "fa:16:3e:fb:00:45", "network": {"id": "a756dafe-b794-4ec3-8dc3-4d5d5fdb3ddf", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-659578877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd6e3befaffa492c8eb487ac87c42785", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45a2eb6d-a5", "ovs_interfaceid": "45a2eb6d-a522-4966-a710-81ffcaf1c461", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.433159] env[68040]: DEBUG oslo_concurrency.lockutils [req-8911af2b-02ab-4c52-b479-9eccd58e357a req-d5f6651a-f4fe-4ea7-b506-16195c6ac188 service nova] Releasing lock "refresh_cache-3411cba3-71c9-4334-bc79-4e322f4231f1" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 669.535902] env[68040]: DEBUG nova.network.neutron [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Successfully updated port: 27ee9d32-b97c-4079-8df0-51ce652963bf {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 669.555214] env[68040]: DEBUG oslo_concurrency.lockutils [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Acquiring lock "refresh_cache-39de4e78-44cd-4582-998e-88ce6de2d51c" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 669.555307] env[68040]: DEBUG oslo_concurrency.lockutils [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Acquired lock "refresh_cache-39de4e78-44cd-4582-998e-88ce6de2d51c" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.557359] env[68040]: DEBUG nova.network.neutron [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 669.683612] env[68040]: DEBUG nova.network.neutron [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 
tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 670.191971] env[68040]: DEBUG nova.compute.manager [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Received event network-changed-a538d604-9220-441c-a9c9-4fde99e4da79 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 670.192296] env[68040]: DEBUG nova.compute.manager [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Refreshing instance network info cache due to event network-changed-a538d604-9220-441c-a9c9-4fde99e4da79. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 670.192445] env[68040]: DEBUG oslo_concurrency.lockutils [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] Acquiring lock "refresh_cache-97b050ff-2997-4504-8787-04f1221251b8" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.192591] env[68040]: DEBUG oslo_concurrency.lockutils [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] Acquired lock "refresh_cache-97b050ff-2997-4504-8787-04f1221251b8" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.192756] env[68040]: DEBUG nova.network.neutron [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Refreshing network info cache for port a538d604-9220-441c-a9c9-4fde99e4da79 {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 670.501719] env[68040]: DEBUG nova.network.neutron [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Updating instance_info_cache with network_info: [{"id": "27ee9d32-b97c-4079-8df0-51ce652963bf", "address": "fa:16:3e:f7:e0:ff", "network": {"id": "ca1a84af-ab33-497c-8767-fd4463c076be", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.179", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0770d674a39c40089de0aade9440b370", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27ee9d32-b9", "ovs_interfaceid": "27ee9d32-b97c-4079-8df0-51ce652963bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 670.526561] env[68040]: DEBUG oslo_concurrency.lockutils [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Releasing lock "refresh_cache-39de4e78-44cd-4582-998e-88ce6de2d51c" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 670.526561] env[68040]: DEBUG nova.compute.manager [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Instance network_info: |[{"id": "27ee9d32-b97c-4079-8df0-51ce652963bf", "address": "fa:16:3e:f7:e0:ff", "network": {"id": "ca1a84af-ab33-497c-8767-fd4463c076be", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.179", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0770d674a39c40089de0aade9440b370", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27ee9d32-b9", "ovs_interfaceid": "27ee9d32-b97c-4079-8df0-51ce652963bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 670.526870] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:e0:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '27ee9d32-b97c-4079-8df0-51ce652963bf', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 670.534677] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Creating folder: Project (86b4fcc6c2634ee8acdb7a4b5fd129ed). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 670.536030] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-027a8656-9cec-432f-a853-6bff17132773 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.546300] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Created folder: Project (86b4fcc6c2634ee8acdb7a4b5fd129ed) in parent group-v639956. 
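Note: the Folder.CreateVM_Task sequences above ("Waiting for the task: (returnval){ ... } to complete", "progress is 0%", "completed successfully") follow oslo.vmware's invoke_api()/wait_for_task() cycle. A hedged, self-contained sketch of that call pattern, where session is an oslo_vmware.api.VMwareAPISession and the function name plus the managed-object arguments are illustrative placeholders:

    # Hedged sketch of the oslo.vmware task cycle: invoke_api() issues the
    # SOAP request (logged as "Invoking Folder.CreateVM_Task with opID=...")
    # and wait_for_task() polls it (the "_poll_task ... progress is 0%" lines).
    # folder_ref/config_spec/pool_ref are placeholder managed-object refs.
    from oslo_vmware import api

    def create_vm(session, folder_ref, config_spec, pool_ref):
        task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                  config=config_spec, pool=pool_ref)
        # Blocks, re-polling at the session's task_poll_interval, raising on
        # task error and returning the completed task info on success.
        return session.wait_for_task(task)

    # Example session construction (placeholder endpoint and credentials):
    #   session = api.VMwareAPISession('vc.example.org', 'admin', 'secret',
    #                                  api_retry_count=10,
    #                                  task_poll_interval=0.5)

The 'duration_secs' values reported when a task completes (e.g. 0.301733 for task-3200165 above) are measured across exactly this wait.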
[ 670.546554] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Creating folder: Instances. Parent ref: group-v639982. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 670.546809] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ace1731-dafb-44aa-ae59-bb4ec724a350 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.556308] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Created folder: Instances in parent group-v639982. [ 670.556308] env[68040]: DEBUG oslo.service.loopingcall [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 670.556308] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 670.556550] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7d9dcfe-7307-489d-bd17-90792be15afe {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.583850] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 670.583850] env[68040]: value = "task-3200168" [ 670.583850] env[68040]: _type = "Task" [ 670.583850] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.594176] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200168, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.919887] env[68040]: DEBUG nova.network.neutron [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Updated VIF entry in instance network info cache for port a538d604-9220-441c-a9c9-4fde99e4da79. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 670.920324] env[68040]: DEBUG nova.network.neutron [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Updating instance_info_cache with network_info: [{"id": "a538d604-9220-441c-a9c9-4fde99e4da79", "address": "fa:16:3e:c5:24:4d", "network": {"id": "a756dafe-b794-4ec3-8dc3-4d5d5fdb3ddf", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-659578877-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd6e3befaffa492c8eb487ac87c42785", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa538d604-92", "ovs_interfaceid": "a538d604-9220-441c-a9c9-4fde99e4da79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.932329] env[68040]: DEBUG oslo_concurrency.lockutils [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] Releasing lock "refresh_cache-97b050ff-2997-4504-8787-04f1221251b8" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 670.932566] env[68040]: DEBUG nova.compute.manager [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Received event network-vif-plugged-1b25545b-a909-4dc7-84fd-75829b9051d7 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 670.932770] env[68040]: DEBUG oslo_concurrency.lockutils [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] Acquiring lock "b81d413c-2449-471a-b3d9-693fc0ab2824-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.932970] env[68040]: DEBUG oslo_concurrency.lockutils [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] Lock "b81d413c-2449-471a-b3d9-693fc0ab2824-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.933168] env[68040]: DEBUG oslo_concurrency.lockutils [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] Lock "b81d413c-2449-471a-b3d9-693fc0ab2824-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.933341] env[68040]: DEBUG 
nova.compute.manager [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] No waiting events found dispatching network-vif-plugged-1b25545b-a909-4dc7-84fd-75829b9051d7 {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 670.933503] env[68040]: WARNING nova.compute.manager [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Received unexpected event network-vif-plugged-1b25545b-a909-4dc7-84fd-75829b9051d7 for instance with vm_state building and task_state spawning. [ 670.933661] env[68040]: DEBUG nova.compute.manager [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Received event network-changed-1b25545b-a909-4dc7-84fd-75829b9051d7 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 670.933813] env[68040]: DEBUG nova.compute.manager [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Refreshing instance network info cache due to event network-changed-1b25545b-a909-4dc7-84fd-75829b9051d7. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 670.935065] env[68040]: DEBUG oslo_concurrency.lockutils [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] Acquiring lock "refresh_cache-b81d413c-2449-471a-b3d9-693fc0ab2824" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 670.935687] env[68040]: DEBUG oslo_concurrency.lockutils [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] Acquired lock "refresh_cache-b81d413c-2449-471a-b3d9-693fc0ab2824" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.935877] env[68040]: DEBUG nova.network.neutron [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Refreshing network info cache for port 1b25545b-a909-4dc7-84fd-75829b9051d7 {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 671.017103] env[68040]: DEBUG oslo_concurrency.lockutils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Acquiring lock "1812f13e-b03d-48d4-940a-43974784265b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.017469] env[68040]: DEBUG oslo_concurrency.lockutils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Lock "1812f13e-b03d-48d4-940a-43974784265b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.098231] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200168, 'name': CreateVM_Task, 'duration_secs': 0.338236} 
completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.098231] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 671.099703] env[68040]: DEBUG oslo_concurrency.lockutils [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 671.099919] env[68040]: DEBUG oslo_concurrency.lockutils [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.102531] env[68040]: DEBUG oslo_concurrency.lockutils [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 671.104143] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19bf96b1-d343-417f-9e80-ed7525694b03 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.114170] env[68040]: DEBUG oslo_vmware.api [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Waiting for the task: (returnval){ [ 671.114170] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52dccb99-e19e-b8b1-26b2-191073902cc3" [ 671.114170] env[68040]: _type = "Task" [ 671.114170] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.126208] env[68040]: DEBUG oslo_vmware.api [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52dccb99-e19e-b8b1-26b2-191073902cc3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.624943] env[68040]: DEBUG oslo_concurrency.lockutils [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 671.625335] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 671.625375] env[68040]: DEBUG oslo_concurrency.lockutils [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 671.639300] env[68040]: DEBUG nova.network.neutron [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Updated VIF entry in instance network info cache for port 1b25545b-a909-4dc7-84fd-75829b9051d7. {{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 671.640549] env[68040]: DEBUG nova.network.neutron [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Updating instance_info_cache with network_info: [{"id": "1b25545b-a909-4dc7-84fd-75829b9051d7", "address": "fa:16:3e:d5:45:94", "network": {"id": "e31cf319-12d1-47bf-9f71-0cef4887d941", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-971583385-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "08258560746e46db9b856799c3743168", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b25545b-a9", "ovs_interfaceid": "1b25545b-a909-4dc7-84fd-75829b9051d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.652650] env[68040]: DEBUG oslo_concurrency.lockutils [req-a8260887-b4b9-41d3-a8bc-98ea6df06fe9 req-f735491e-3452-4ffd-9461-9e8afa4ebc53 service nova] Releasing lock "refresh_cache-b81d413c-2449-471a-b3d9-693fc0ab2824" {{(pid=68040) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 672.176513] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a5908c10-87e2-4461-8947-c87989df5100 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Acquiring lock "a7853da1-b00c-4b05-8f4a-f928fcb59cb2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 672.176716] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a5908c10-87e2-4461-8947-c87989df5100 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Lock "a7853da1-b00c-4b05-8f4a-f928fcb59cb2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.562030] env[68040]: DEBUG nova.compute.manager [req-9a0c7aa1-31c7-4156-8079-c61733453069 req-6840b9dc-8a67-4553-aeb2-b29394b2bf9c service nova] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Received event network-vif-plugged-27ee9d32-b97c-4079-8df0-51ce652963bf {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 674.562030] env[68040]: DEBUG oslo_concurrency.lockutils [req-9a0c7aa1-31c7-4156-8079-c61733453069 req-6840b9dc-8a67-4553-aeb2-b29394b2bf9c service nova] Acquiring lock "39de4e78-44cd-4582-998e-88ce6de2d51c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.564678] env[68040]: DEBUG oslo_concurrency.lockutils [req-9a0c7aa1-31c7-4156-8079-c61733453069 req-6840b9dc-8a67-4553-aeb2-b29394b2bf9c service nova] Lock "39de4e78-44cd-4582-998e-88ce6de2d51c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.003s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.564885] env[68040]: DEBUG oslo_concurrency.lockutils [req-9a0c7aa1-31c7-4156-8079-c61733453069 req-6840b9dc-8a67-4553-aeb2-b29394b2bf9c service nova] Lock "39de4e78-44cd-4582-998e-88ce6de2d51c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.565114] env[68040]: DEBUG nova.compute.manager [req-9a0c7aa1-31c7-4156-8079-c61733453069 req-6840b9dc-8a67-4553-aeb2-b29394b2bf9c service nova] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] No waiting events found dispatching network-vif-plugged-27ee9d32-b97c-4079-8df0-51ce652963bf {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 674.568018] env[68040]: WARNING nova.compute.manager [req-9a0c7aa1-31c7-4156-8079-c61733453069 req-6840b9dc-8a67-4553-aeb2-b29394b2bf9c service nova] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Received unexpected event network-vif-plugged-27ee9d32-b97c-4079-8df0-51ce652963bf for instance with vm_state building and task_state spawning. 
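The CreateVM_Task and SearchDatastore_Task records above are the oslo.vmware task-polling loop: invoke_api() issues a *_Task vSphere method, and wait_for_task() polls the TaskInfo until it reports success (the "progress is 0%" and "completed successfully" lines come from _poll_task). A minimal sketch of the same pattern, illustrated here with FileManager.DeleteDatastoreFile_Task (a task-returning method that also appears further down in this log); the host, credentials, datastore path, and datacenter moref are placeholders, not values from this log:

```python
# Sketch of the oslo.vmware session/task pattern visible in these records.
# All connection parameters and object identifiers below are placeholders.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Placeholder Datacenter reference; real code would look this up through
# the property collector rather than hard-coding a moref value.
dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')

# Any *_Task method returns a Task moref immediately; wait_for_task()
# then polls it, logging progress, and raises if the task ends in an
# error state (as happens later in this log with InvalidArgument).
file_manager = session.vim.service_content.fileManager
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore2] vmware_temp/example.vmdk',
    datacenter=dc_ref)
task_info = session.wait_for_task(task)
```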
[ 674.568018] env[68040]: DEBUG nova.compute.manager [req-9a0c7aa1-31c7-4156-8079-c61733453069 req-6840b9dc-8a67-4553-aeb2-b29394b2bf9c service nova] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Received event network-changed-27ee9d32-b97c-4079-8df0-51ce652963bf {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 674.568018] env[68040]: DEBUG nova.compute.manager [req-9a0c7aa1-31c7-4156-8079-c61733453069 req-6840b9dc-8a67-4553-aeb2-b29394b2bf9c service nova] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Refreshing instance network info cache due to event network-changed-27ee9d32-b97c-4079-8df0-51ce652963bf. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 674.568018] env[68040]: DEBUG oslo_concurrency.lockutils [req-9a0c7aa1-31c7-4156-8079-c61733453069 req-6840b9dc-8a67-4553-aeb2-b29394b2bf9c service nova] Acquiring lock "refresh_cache-39de4e78-44cd-4582-998e-88ce6de2d51c" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 674.568018] env[68040]: DEBUG oslo_concurrency.lockutils [req-9a0c7aa1-31c7-4156-8079-c61733453069 req-6840b9dc-8a67-4553-aeb2-b29394b2bf9c service nova] Acquired lock "refresh_cache-39de4e78-44cd-4582-998e-88ce6de2d51c" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.568441] env[68040]: DEBUG nova.network.neutron [req-9a0c7aa1-31c7-4156-8079-c61733453069 req-6840b9dc-8a67-4553-aeb2-b29394b2bf9c service nova] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Refreshing network info cache for port 27ee9d32-b97c-4079-8df0-51ce652963bf {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 675.181493] env[68040]: DEBUG nova.network.neutron [req-9a0c7aa1-31c7-4156-8079-c61733453069 req-6840b9dc-8a67-4553-aeb2-b29394b2bf9c service nova] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Updated VIF entry in instance network info cache for port 27ee9d32-b97c-4079-8df0-51ce652963bf. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 675.181822] env[68040]: DEBUG nova.network.neutron [req-9a0c7aa1-31c7-4156-8079-c61733453069 req-6840b9dc-8a67-4553-aeb2-b29394b2bf9c service nova] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Updating instance_info_cache with network_info: [{"id": "27ee9d32-b97c-4079-8df0-51ce652963bf", "address": "fa:16:3e:f7:e0:ff", "network": {"id": "ca1a84af-ab33-497c-8767-fd4463c076be", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.179", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0770d674a39c40089de0aade9440b370", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27ee9d32-b9", "ovs_interfaceid": "27ee9d32-b97c-4079-8df0-51ce652963bf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.197046] env[68040]: DEBUG oslo_concurrency.lockutils [req-9a0c7aa1-31c7-4156-8079-c61733453069 req-6840b9dc-8a67-4553-aeb2-b29394b2bf9c service nova] Releasing lock "refresh_cache-39de4e78-44cd-4582-998e-88ce6de2d51c" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 675.736468] env[68040]: DEBUG oslo_concurrency.lockutils [None req-9f583b64-9d44-4375-801a-6272c17e1723 tempest-TenantUsagesTestJSON-4403897 tempest-TenantUsagesTestJSON-4403897-project-member] Acquiring lock "4e0f3617-aef9-4d66-8243-ff530b4084cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.737228] env[68040]: DEBUG oslo_concurrency.lockutils [None req-9f583b64-9d44-4375-801a-6272c17e1723 tempest-TenantUsagesTestJSON-4403897 tempest-TenantUsagesTestJSON-4403897-project-member] Lock "4e0f3617-aef9-4d66-8243-ff530b4084cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.766067] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a224b37e-6527-43da-bde8-66de547e9a8b tempest-ImagesOneServerTestJSON-475436907 tempest-ImagesOneServerTestJSON-475436907-project-member] Acquiring lock "28fd3e76-1a9b-4273-b951-e50a8506a9bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 676.766353] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a224b37e-6527-43da-bde8-66de547e9a8b tempest-ImagesOneServerTestJSON-475436907 tempest-ImagesOneServerTestJSON-475436907-project-member] Lock "28fd3e76-1a9b-4273-b951-e50a8506a9bc" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.850754] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e9dc12-89b9-46c4-a6e5-bc2dfa6d256f tempest-VolumesAssistedSnapshotsTest-914443928 tempest-VolumesAssistedSnapshotsTest-914443928-project-member] Acquiring lock "810b9f3c-7a92-40ab-8630-5c1ad6e4762c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 676.851035] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e9dc12-89b9-46c4-a6e5-bc2dfa6d256f tempest-VolumesAssistedSnapshotsTest-914443928 tempest-VolumesAssistedSnapshotsTest-914443928-project-member] Lock "810b9f3c-7a92-40ab-8630-5c1ad6e4762c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 678.369510] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f2820da5-a2ae-477a-a262-ab33174148d0 tempest-ServersWithSpecificFlavorTestJSON-83255541 tempest-ServersWithSpecificFlavorTestJSON-83255541-project-member] Acquiring lock "0a9103d6-2461-4ed3-93fa-a0149ccc5267" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 678.369920] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f2820da5-a2ae-477a-a262-ab33174148d0 tempest-ServersWithSpecificFlavorTestJSON-83255541 tempest-ServersWithSpecificFlavorTestJSON-83255541-project-member] Lock "0a9103d6-2461-4ed3-93fa-a0149ccc5267" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.611087] env[68040]: DEBUG oslo_concurrency.lockutils [None req-028640e1-c0bb-4655-9d8a-614aac499dac tempest-ServerActionsTestOtherA-519480154 tempest-ServerActionsTestOtherA-519480154-project-member] Acquiring lock "0a0d385d-3255-4755-8987-a26cd28006cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 679.611497] env[68040]: DEBUG oslo_concurrency.lockutils [None req-028640e1-c0bb-4655-9d8a-614aac499dac tempest-ServerActionsTestOtherA-519480154 tempest-ServerActionsTestOtherA-519480154-project-member] Lock "0a0d385d-3255-4755-8987-a26cd28006cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 683.936193] env[68040]: DEBUG oslo_concurrency.lockutils [None req-76302229-e0af-42eb-9319-827f9c2429cf tempest-ServerDiagnosticsNegativeTest-1386750677 tempest-ServerDiagnosticsNegativeTest-1386750677-project-member] Acquiring lock "0cc0463e-1e55-4dd5-96b2-ee15025e689b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 683.936511] env[68040]: DEBUG oslo_concurrency.lockutils [None req-76302229-e0af-42eb-9319-827f9c2429cf tempest-ServerDiagnosticsNegativeTest-1386750677 tempest-ServerDiagnosticsNegativeTest-1386750677-project-member] Lock "0cc0463e-1e55-4dd5-96b2-ee15025e689b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 699.530113] env[68040]: WARNING oslo_vmware.rw_handles [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 699.530113] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 699.530113] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 699.530113] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 699.530113] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 699.530113] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 699.530113] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 699.530113] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 699.530113] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 699.530113] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 699.530113] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 699.530113] env[68040]: ERROR oslo_vmware.rw_handles [ 699.531241] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/c0df2a27-8967-46c4-be78-8179c02f0386/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 699.532013] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 699.533362] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Copying Virtual Disk [datastore2] vmware_temp/c0df2a27-8967-46c4-be78-8179c02f0386/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/c0df2a27-8967-46c4-be78-8179c02f0386/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 699.534520] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-895434d8-c24b-4171-8f93-a25f0b1024b3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.541428] env[68040]: DEBUG oslo_vmware.api [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Waiting for the task: (returnval){ [ 699.541428] env[68040]: value = "task-3200173" [ 699.541428] env[68040]: _type = "Task" [ 699.541428] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.550105] env[68040]: DEBUG oslo_vmware.api [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Task: {'id': task-3200173, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.052612] env[68040]: DEBUG oslo_vmware.exceptions [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 700.054967] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 700.057064] env[68040]: ERROR nova.compute.manager [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 700.057064] env[68040]: Faults: ['InvalidArgument'] [ 700.057064] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Traceback (most recent call last): [ 700.057064] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 700.057064] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] yield resources [ 700.057064] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 700.057064] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] self.driver.spawn(context, instance, image_meta, [ 700.057064] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 700.057064] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] self._vmops.spawn(context, instance, image_meta, injected_files, [ 700.057064] env[68040]: ERROR 
nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 700.057064] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] self._fetch_image_if_missing(context, vi) [ 700.057064] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 700.057564] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] image_cache(vi, tmp_image_ds_loc) [ 700.057564] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 700.057564] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] vm_util.copy_virtual_disk( [ 700.057564] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 700.057564] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] session._wait_for_task(vmdk_copy_task) [ 700.057564] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 700.057564] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] return self.wait_for_task(task_ref) [ 700.057564] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 700.057564] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] return evt.wait() [ 700.057564] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 700.057564] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] result = hub.switch() [ 700.057564] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 700.057564] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] return self.greenlet.switch() [ 700.058110] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 700.058110] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] self.f(*self.args, **self.kw) [ 700.058110] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 700.058110] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] raise exceptions.translate_fault(task_info.error) [ 700.058110] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 700.058110] env[68040]: ERROR nova.compute.manager [instance: 
467ffaac-0414-4bed-af2c-d0939d90ba79] Faults: ['InvalidArgument'] [ 700.058110] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] [ 700.058110] env[68040]: INFO nova.compute.manager [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Terminating instance [ 700.059786] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 700.060013] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 700.061662] env[68040]: DEBUG nova.compute.manager [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 700.061888] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 700.062219] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b5d7c99-6af9-436b-b7be-bd88046a4291 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.066253] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c55b131-9c6f-4406-81b0-868c01fd498d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.074226] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 700.074654] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b2722601-9aa5-4c94-a44d-63bb39865470 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.081704] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 700.082204] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None 
req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 700.082562] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e163b96-b50f-4f4b-86eb-b8395aceda00 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.088211] env[68040]: DEBUG oslo_vmware.api [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Waiting for the task: (returnval){ [ 700.088211] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52ce82ea-3f1a-59c7-d51e-d29e2b821d8e" [ 700.088211] env[68040]: _type = "Task" [ 700.088211] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.105739] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 700.106270] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Creating directory with path [datastore2] vmware_temp/3068b196-7b16-4d0a-a2a2-fcc526150658/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 700.106698] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c437a2ba-26cc-46be-a303-da925381aa3f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.155920] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Created directory with path [datastore2] vmware_temp/3068b196-7b16-4d0a-a2a2-fcc526150658/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 700.155920] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Fetch image to [datastore2] vmware_temp/3068b196-7b16-4d0a-a2a2-fcc526150658/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 700.156177] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/3068b196-7b16-4d0a-a2a2-fcc526150658/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 700.159090] env[68040]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c85517b2-854b-4fbc-826a-26210efc8b35 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.165489] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90e19f8-f543-4ddf-8fb9-7e975eb12a0e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.169841] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 700.170132] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 700.170386] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Deleting the datastore file [datastore2] 467ffaac-0414-4bed-af2c-d0939d90ba79 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 700.171032] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7aacfe00-f431-4ef4-bd21-461cdadd7681 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.183675] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85acf2f8-de8c-437a-a135-383769f7f832 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.190198] env[68040]: DEBUG oslo_vmware.api [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Waiting for the task: (returnval){ [ 700.190198] env[68040]: value = "task-3200176" [ 700.190198] env[68040]: _type = "Task" [ 700.190198] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.225895] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcba4953-5704-4dca-b07b-503c341e6038 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.232076] env[68040]: DEBUG oslo_vmware.api [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Task: {'id': task-3200176, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.235591] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-334d85c0-2f4e-4def-8781-439d478e82f2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.261523] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 700.342992] env[68040]: DEBUG oslo_vmware.rw_handles [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3068b196-7b16-4d0a-a2a2-fcc526150658/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 700.411076] env[68040]: DEBUG oslo_vmware.rw_handles [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 700.411076] env[68040]: DEBUG oslo_vmware.rw_handles [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3068b196-7b16-4d0a-a2a2-fcc526150658/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 700.703447] env[68040]: DEBUG oslo_vmware.api [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Task: {'id': task-3200176, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23409} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.703729] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 700.704240] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 700.704240] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 700.706694] env[68040]: INFO nova.compute.manager [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Took 0.64 seconds to destroy the instance on the hypervisor. [ 700.707352] env[68040]: DEBUG nova.compute.claims [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 700.707533] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.707751] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.145373] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21b372f1-4652-461c-8cec-212d8931392c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.155017] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376d3be5-b512-4ac9-b284-341b33a48125 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.190266] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6630fba3-1cdd-4b92-8c49-0a8b087394d4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.198986] env[68040]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b97586-e31b-49ef-918a-2bc8970db115 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.214506] env[68040]: DEBUG nova.compute.provider_tree [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Updating inventory in ProviderTree for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 701.256915] env[68040]: ERROR nova.scheduler.client.report [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [req-ec5051a0-1142-48f3-9f87-a1497d097582] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 22db6f73-b3da-436a-bf40-9c8c240b2e44. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ec5051a0-1142-48f3-9f87-a1497d097582"}]}: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 701.283809] env[68040]: DEBUG nova.scheduler.client.report [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Refreshing inventories for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 701.306467] env[68040]: DEBUG nova.scheduler.client.report [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Updating ProviderTree inventory for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 701.306467] env[68040]: DEBUG nova.compute.provider_tree [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Updating inventory in ProviderTree for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 with inventory: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 701.333091] env[68040]: DEBUG nova.scheduler.client.report [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Refreshing aggregate associations for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44, aggregates: None {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 701.366779] env[68040]: DEBUG nova.scheduler.client.report [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Refreshing trait associations for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 701.823813] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Acquiring lock "de1b8ef9-0088-4d2a-985e-d04fcff55d31" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 701.823813] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Lock "de1b8ef9-0088-4d2a-985e-d04fcff55d31" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 701.824326] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54659d89-fb77-4fa5-8439-4b6878e00e73 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.832838] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de9eb56-93e7-42c1-bd08-c1b7e0f9d874 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.869030] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eaa37c9-2a22-4ce9-b042-c84753e62093 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.877375] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a2e76ed-6c25-4fc7-a7ca-83010371c6e9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.895891] env[68040]: DEBUG nova.compute.provider_tree [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 
tempest-DeleteServersAdminTestJSON-145190633-project-member] Updating inventory in ProviderTree for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 701.925700] env[68040]: ERROR nova.scheduler.client.report [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [req-e2913c26-ae04-4699-af7f-e99eeb4a7b69] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 22db6f73-b3da-436a-bf40-9c8c240b2e44. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e2913c26-ae04-4699-af7f-e99eeb4a7b69"}]}: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 701.958225] env[68040]: DEBUG nova.scheduler.client.report [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Refreshing inventories for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 701.984703] env[68040]: DEBUG nova.scheduler.client.report [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Updating ProviderTree inventory for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 701.984703] env[68040]: DEBUG nova.compute.provider_tree [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Updating inventory in ProviderTree for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 126, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 702.005296] env[68040]: DEBUG nova.scheduler.client.report [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Refreshing aggregate associations for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44, aggregates: None {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 702.036240] env[68040]: DEBUG nova.scheduler.client.report [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Refreshing trait associations for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 702.464560] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d9d62b-ea95-4223-af2c-4a23e93494ef {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.479359] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6091b61-f041-451e-948c-dbc363575e0d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.526623] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96da3049-9a4a-4599-8591-0f819597cd81 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.538351] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf42e019-31f8-47ca-9b0c-f0c3b21d6639 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.556579] env[68040]: DEBUG nova.compute.provider_tree [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Updating inventory in ProviderTree for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 702.610619] env[68040]: DEBUG nova.scheduler.client.report [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Updated inventory for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 with generation 28 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 702.610619] env[68040]: DEBUG nova.compute.provider_tree [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Updating resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 generation from 28 to 29 during operation: update_inventory {{(pid=68040) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 702.610880] env[68040]: DEBUG nova.compute.provider_tree [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Updating inventory in ProviderTree for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 702.638796] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.931s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.639291] env[68040]: ERROR nova.compute.manager [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 702.639291] env[68040]: Faults: ['InvalidArgument'] [ 702.639291] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Traceback (most recent call last): [ 702.639291] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 702.639291] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] self.driver.spawn(context, instance, image_meta, [ 702.639291] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 702.639291] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] self._vmops.spawn(context, instance, image_meta, injected_files, [ 702.639291] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 702.639291] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] self._fetch_image_if_missing(context, vi) [ 702.639291] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 702.639291] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] 
image_cache(vi, tmp_image_ds_loc) [ 702.639291] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 702.639913] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] vm_util.copy_virtual_disk( [ 702.639913] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 702.639913] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] session._wait_for_task(vmdk_copy_task) [ 702.639913] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 702.639913] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] return self.wait_for_task(task_ref) [ 702.639913] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 702.639913] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] return evt.wait() [ 702.639913] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 702.639913] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] result = hub.switch() [ 702.639913] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 702.639913] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] return self.greenlet.switch() [ 702.639913] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 702.639913] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] self.f(*self.args, **self.kw) [ 702.641605] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 702.641605] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] raise exceptions.translate_fault(task_info.error) [ 702.641605] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 702.641605] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Faults: ['InvalidArgument'] [ 702.641605] env[68040]: ERROR nova.compute.manager [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] [ 702.642468] env[68040]: DEBUG nova.compute.utils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 702.650690] env[68040]: DEBUG nova.compute.manager [None 
req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Build of instance 467ffaac-0414-4bed-af2c-d0939d90ba79 was re-scheduled: A specified parameter was not correct: fileType [ 702.650690] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 702.650690] env[68040]: DEBUG nova.compute.manager [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 702.650690] env[68040]: DEBUG nova.compute.manager [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 702.650899] env[68040]: DEBUG nova.compute.manager [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 702.650942] env[68040]: DEBUG nova.network.neutron [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 703.282327] env[68040]: DEBUG nova.network.neutron [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.303731] env[68040]: INFO nova.compute.manager [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 467ffaac-0414-4bed-af2c-d0939d90ba79] Took 0.65 seconds to deallocate network for instance. 
[ 703.448725] env[68040]: INFO nova.scheduler.client.report [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Deleted allocations for instance 467ffaac-0414-4bed-af2c-d0939d90ba79 [ 703.494430] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8b0127d9-c739-4b5e-9c23-ebf0ef64e673 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Lock "467ffaac-0414-4bed-af2c-d0939d90ba79" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.601s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.528809] env[68040]: DEBUG nova.compute.manager [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 703.623194] env[68040]: DEBUG oslo_concurrency.lockutils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 703.623447] env[68040]: DEBUG oslo_concurrency.lockutils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.626458] env[68040]: INFO nova.compute.claims [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 704.115880] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a73f79-216d-433c-8196-ba0b7a6143ad {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.125317] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6b2a27-c3d1-41d4-89a8-24de35cfdbea {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.163998] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6619cab1-8ca9-438e-be53-ae12c79059f9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.174163] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f2f063-675e-4a77-8489-4a921cf7d702 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.199084] env[68040]: DEBUG nova.compute.provider_tree [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 
tempest-AttachInterfacesV270Test-1668457507-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 704.217031] env[68040]: DEBUG nova.scheduler.client.report [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 704.245590] env[68040]: DEBUG oslo_concurrency.lockutils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.622s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.246121] env[68040]: DEBUG nova.compute.manager [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 704.319157] env[68040]: DEBUG nova.compute.utils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 704.320550] env[68040]: DEBUG nova.compute.manager [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 704.320691] env[68040]: DEBUG nova.network.neutron [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 704.337709] env[68040]: DEBUG nova.compute.manager [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 704.422533] env[68040]: DEBUG nova.compute.manager [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Start spawning the instance on the hypervisor. 
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 704.471537] env[68040]: DEBUG nova.virt.hardware [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 704.471812] env[68040]: DEBUG nova.virt.hardware [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 704.471942] env[68040]: DEBUG nova.virt.hardware [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.472284] env[68040]: DEBUG nova.virt.hardware [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 704.472557] env[68040]: DEBUG nova.virt.hardware [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 704.472627] env[68040]: DEBUG nova.virt.hardware [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 704.472885] env[68040]: DEBUG nova.virt.hardware [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 704.473040] env[68040]: DEBUG nova.virt.hardware [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 704.473173] 
env[68040]: DEBUG nova.virt.hardware [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 704.473335] env[68040]: DEBUG nova.virt.hardware [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 704.473513] env[68040]: DEBUG nova.virt.hardware [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 704.474528] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf3037b-f1a2-4b7e-b8d5-bdf2235cac05 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.489599] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b2463724-8a2d-4877-a8db-40b9436ca215 tempest-ServerAddressesNegativeTestJSON-701301007 tempest-ServerAddressesNegativeTestJSON-701301007-project-member] Acquiring lock "e2a9808e-6da7-4e53-a6d3-d3144ecf158a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.489599] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b2463724-8a2d-4877-a8db-40b9436ca215 tempest-ServerAddressesNegativeTestJSON-701301007 tempest-ServerAddressesNegativeTestJSON-701301007-project-member] Lock "e2a9808e-6da7-4e53-a6d3-d3144ecf158a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.494701] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1825817d-fd0a-4199-8584-a569c3586eff {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.655301] env[68040]: DEBUG nova.policy [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db0a68801d4c492ca8681c35000ba7bc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62f74fa8dd004aff9f32580a0a2cfc36', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 705.526415] env[68040]: DEBUG nova.network.neutron [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Successfully 
created port: ac4f1355-d53a-409c-8310-c71099a7abd5 {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 705.715161] env[68040]: DEBUG oslo_concurrency.lockutils [None req-87c93415-2d61-408b-8737-7a532e9b629a tempest-MultipleCreateTestJSON-225064095 tempest-MultipleCreateTestJSON-225064095-project-member] Acquiring lock "ba480b6b-3d33-4f60-b045-21fe059fd0a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.715161] env[68040]: DEBUG oslo_concurrency.lockutils [None req-87c93415-2d61-408b-8737-7a532e9b629a tempest-MultipleCreateTestJSON-225064095 tempest-MultipleCreateTestJSON-225064095-project-member] Lock "ba480b6b-3d33-4f60-b045-21fe059fd0a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 705.759585] env[68040]: DEBUG oslo_concurrency.lockutils [None req-87c93415-2d61-408b-8737-7a532e9b629a tempest-MultipleCreateTestJSON-225064095 tempest-MultipleCreateTestJSON-225064095-project-member] Acquiring lock "0b61102f-1b2e-4962-b94b-d27d394c5aef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.760646] env[68040]: DEBUG oslo_concurrency.lockutils [None req-87c93415-2d61-408b-8737-7a532e9b629a tempest-MultipleCreateTestJSON-225064095 tempest-MultipleCreateTestJSON-225064095-project-member] Lock "0b61102f-1b2e-4962-b94b-d27d394c5aef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.392629] env[68040]: DEBUG nova.network.neutron [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Successfully updated port: ac4f1355-d53a-409c-8310-c71099a7abd5 {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 706.411169] env[68040]: DEBUG oslo_concurrency.lockutils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Acquiring lock "refresh_cache-a89ff564-ea35-4000-8efa-2c1ec2b61759" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.411335] env[68040]: DEBUG oslo_concurrency.lockutils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Acquired lock "refresh_cache-a89ff564-ea35-4000-8efa-2c1ec2b61759" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.411376] env[68040]: DEBUG nova.network.neutron [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 
706.478942] env[68040]: DEBUG nova.network.neutron [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 706.694129] env[68040]: DEBUG nova.network.neutron [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Updating instance_info_cache with network_info: [{"id": "ac4f1355-d53a-409c-8310-c71099a7abd5", "address": "fa:16:3e:36:e8:6b", "network": {"id": "7d52427b-27c6-4c9c-a2ef-be1fb44a3a4a", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1994465630-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62f74fa8dd004aff9f32580a0a2cfc36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1316f5aa-529f-4bac-8dd7-6076a9d43312", "external-id": "nsx-vlan-transportzone-399", "segmentation_id": 399, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac4f1355-d5", "ovs_interfaceid": "ac4f1355-d53a-409c-8310-c71099a7abd5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.722391] env[68040]: DEBUG oslo_concurrency.lockutils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Releasing lock "refresh_cache-a89ff564-ea35-4000-8efa-2c1ec2b61759" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 706.722750] env[68040]: DEBUG nova.compute.manager [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Instance network_info: |[{"id": "ac4f1355-d53a-409c-8310-c71099a7abd5", "address": "fa:16:3e:36:e8:6b", "network": {"id": "7d52427b-27c6-4c9c-a2ef-be1fb44a3a4a", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1994465630-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62f74fa8dd004aff9f32580a0a2cfc36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1316f5aa-529f-4bac-8dd7-6076a9d43312", "external-id": "nsx-vlan-transportzone-399", "segmentation_id": 399, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tapac4f1355-d5", "ovs_interfaceid": "ac4f1355-d53a-409c-8310-c71099a7abd5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 706.723698] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:e8:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1316f5aa-529f-4bac-8dd7-6076a9d43312', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac4f1355-d53a-409c-8310-c71099a7abd5', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 706.734080] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Creating folder: Project (62f74fa8dd004aff9f32580a0a2cfc36). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 706.736855] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0db50665-f87a-4e73-804c-a3f085ad74dd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.750065] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Created folder: Project (62f74fa8dd004aff9f32580a0a2cfc36) in parent group-v639956. [ 706.750332] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Creating folder: Instances. Parent ref: group-v639989. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 706.751420] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa4ee736-b729-4124-b369-2c87c6197259 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.760410] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Created folder: Instances in parent group-v639989. [ 706.760675] env[68040]: DEBUG oslo.service.loopingcall [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 706.760869] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 706.761097] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a6be6272-ae4c-4e70-a791-61b3949512a6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.787141] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 706.787141] env[68040]: value = "task-3200182" [ 706.787141] env[68040]: _type = "Task" [ 706.787141] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.798805] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200182, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.297887] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200182, 'name': CreateVM_Task, 'duration_secs': 0.335053} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.298141] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 707.298890] env[68040]: DEBUG oslo_concurrency.lockutils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 707.299141] env[68040]: DEBUG oslo_concurrency.lockutils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.299951] env[68040]: DEBUG oslo_concurrency.lockutils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 707.299951] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f39b71e-bcb9-4c40-99bf-274d12949bf2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.304260] env[68040]: DEBUG oslo_vmware.api [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Waiting for the task: (returnval){ [ 707.304260] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52929a69-edb4-164b-d839-f1722edb4664" [ 707.304260] env[68040]: _type = "Task" [ 707.304260] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.312167] env[68040]: DEBUG oslo_vmware.api [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52929a69-edb4-164b-d839-f1722edb4664, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.814717] env[68040]: DEBUG oslo_concurrency.lockutils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.814717] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 707.814717] env[68040]: DEBUG oslo_concurrency.lockutils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 708.973539] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b30a638a-87d2-412a-b0ca-4e522e603b4c tempest-ServersNegativeTestJSON-912418572 tempest-ServersNegativeTestJSON-912418572-project-member] Acquiring lock "856ec4dd-3a1d-4140-b3d5-52690cf87f92" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 708.973848] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b30a638a-87d2-412a-b0ca-4e522e603b4c tempest-ServersNegativeTestJSON-912418572 tempest-ServersNegativeTestJSON-912418572-project-member] Lock "856ec4dd-3a1d-4140-b3d5-52690cf87f92" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.718216] env[68040]: DEBUG nova.compute.manager [req-ee094963-4d32-40bd-8d41-062c64ddf5ca req-5caaeead-aaf3-4dbd-b8c7-b7d590fd446e service nova] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Received event network-vif-plugged-ac4f1355-d53a-409c-8310-c71099a7abd5 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 709.718216] env[68040]: DEBUG oslo_concurrency.lockutils [req-ee094963-4d32-40bd-8d41-062c64ddf5ca req-5caaeead-aaf3-4dbd-b8c7-b7d590fd446e service nova] Acquiring lock "a89ff564-ea35-4000-8efa-2c1ec2b61759-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.718216] env[68040]: DEBUG 
oslo_concurrency.lockutils [req-ee094963-4d32-40bd-8d41-062c64ddf5ca req-5caaeead-aaf3-4dbd-b8c7-b7d590fd446e service nova] Lock "a89ff564-ea35-4000-8efa-2c1ec2b61759-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.718627] env[68040]: DEBUG oslo_concurrency.lockutils [req-ee094963-4d32-40bd-8d41-062c64ddf5ca req-5caaeead-aaf3-4dbd-b8c7-b7d590fd446e service nova] Lock "a89ff564-ea35-4000-8efa-2c1ec2b61759-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.718862] env[68040]: DEBUG nova.compute.manager [req-ee094963-4d32-40bd-8d41-062c64ddf5ca req-5caaeead-aaf3-4dbd-b8c7-b7d590fd446e service nova] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] No waiting events found dispatching network-vif-plugged-ac4f1355-d53a-409c-8310-c71099a7abd5 {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 709.719688] env[68040]: WARNING nova.compute.manager [req-ee094963-4d32-40bd-8d41-062c64ddf5ca req-5caaeead-aaf3-4dbd-b8c7-b7d590fd446e service nova] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Received unexpected event network-vif-plugged-ac4f1355-d53a-409c-8310-c71099a7abd5 for instance with vm_state building and task_state spawning. [ 709.719748] env[68040]: DEBUG nova.compute.manager [req-ee094963-4d32-40bd-8d41-062c64ddf5ca req-5caaeead-aaf3-4dbd-b8c7-b7d590fd446e service nova] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Received event network-changed-ac4f1355-d53a-409c-8310-c71099a7abd5 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 709.719886] env[68040]: DEBUG nova.compute.manager [req-ee094963-4d32-40bd-8d41-062c64ddf5ca req-5caaeead-aaf3-4dbd-b8c7-b7d590fd446e service nova] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Refreshing instance network info cache due to event network-changed-ac4f1355-d53a-409c-8310-c71099a7abd5. 
{{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 709.720092] env[68040]: DEBUG oslo_concurrency.lockutils [req-ee094963-4d32-40bd-8d41-062c64ddf5ca req-5caaeead-aaf3-4dbd-b8c7-b7d590fd446e service nova] Acquiring lock "refresh_cache-a89ff564-ea35-4000-8efa-2c1ec2b61759" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 709.720228] env[68040]: DEBUG oslo_concurrency.lockutils [req-ee094963-4d32-40bd-8d41-062c64ddf5ca req-5caaeead-aaf3-4dbd-b8c7-b7d590fd446e service nova] Acquired lock "refresh_cache-a89ff564-ea35-4000-8efa-2c1ec2b61759" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.720579] env[68040]: DEBUG nova.network.neutron [req-ee094963-4d32-40bd-8d41-062c64ddf5ca req-5caaeead-aaf3-4dbd-b8c7-b7d590fd446e service nova] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Refreshing network info cache for port ac4f1355-d53a-409c-8310-c71099a7abd5 {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 710.026069] env[68040]: DEBUG nova.network.neutron [req-ee094963-4d32-40bd-8d41-062c64ddf5ca req-5caaeead-aaf3-4dbd-b8c7-b7d590fd446e service nova] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Updated VIF entry in instance network info cache for port ac4f1355-d53a-409c-8310-c71099a7abd5. {{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 710.026069] env[68040]: DEBUG nova.network.neutron [req-ee094963-4d32-40bd-8d41-062c64ddf5ca req-5caaeead-aaf3-4dbd-b8c7-b7d590fd446e service nova] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Updating instance_info_cache with network_info: [{"id": "ac4f1355-d53a-409c-8310-c71099a7abd5", "address": "fa:16:3e:36:e8:6b", "network": {"id": "7d52427b-27c6-4c9c-a2ef-be1fb44a3a4a", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1994465630-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62f74fa8dd004aff9f32580a0a2cfc36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1316f5aa-529f-4bac-8dd7-6076a9d43312", "external-id": "nsx-vlan-transportzone-399", "segmentation_id": 399, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac4f1355-d5", "ovs_interfaceid": "ac4f1355-d53a-409c-8310-c71099a7abd5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.038439] env[68040]: DEBUG oslo_concurrency.lockutils [req-ee094963-4d32-40bd-8d41-062c64ddf5ca req-5caaeead-aaf3-4dbd-b8c7-b7d590fd446e service nova] Releasing lock "refresh_cache-a89ff564-ea35-4000-8efa-2c1ec2b61759" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 710.827471] env[68040]: DEBUG oslo_concurrency.lockutils [None req-65a5caf1-356b-40c2-a6be-90ff3fd78b20 tempest-SecurityGroupsTestJSON-810175009 tempest-SecurityGroupsTestJSON-810175009-project-member] Acquiring 
lock "dae1bdde-e497-4ee6-9582-4988c5ae7a96" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.827471] env[68040]: DEBUG oslo_concurrency.lockutils [None req-65a5caf1-356b-40c2-a6be-90ff3fd78b20 tempest-SecurityGroupsTestJSON-810175009 tempest-SecurityGroupsTestJSON-810175009-project-member] Lock "dae1bdde-e497-4ee6-9582-4988c5ae7a96" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.094762] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6a70d2e5-22e2-4c93-a852-ae7cba49af6f tempest-ServerPasswordTestJSON-1477827249 tempest-ServerPasswordTestJSON-1477827249-project-member] Acquiring lock "1b382d10-944a-4817-b959-c8ad2664309e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.095065] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6a70d2e5-22e2-4c93-a852-ae7cba49af6f tempest-ServerPasswordTestJSON-1477827249 tempest-ServerPasswordTestJSON-1477827249-project-member] Lock "1b382d10-944a-4817-b959-c8ad2664309e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.191387] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ce3d29b4-4250-4781-8c88-e2937e3fe239 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Acquiring lock "37f5c20e-dbc4-46a3-a83f-c7329f7a764c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.191661] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ce3d29b4-4250-4781-8c88-e2937e3fe239 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Lock "37f5c20e-dbc4-46a3-a83f-c7329f7a764c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.179385] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f69692d9-d7cf-4204-b329-dad0300d9d74 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquiring lock "25cb12ed-d0ed-402f-ba73-3c6c835adb17" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.179638] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f69692d9-d7cf-4204-b329-dad0300d9d74 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "25cb12ed-d0ed-402f-ba73-3c6c835adb17" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
713.191019] env[68040]: DEBUG oslo_concurrency.lockutils [None req-bb8a9998-523f-47d2-bf66-5a22cf5a5cd5 tempest-FloatingIPsAssociationNegativeTestJSON-1605724567 tempest-FloatingIPsAssociationNegativeTestJSON-1605724567-project-member] Acquiring lock "d42d2ca9-ac93-4efb-92a0-de248221dd43" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.191238] env[68040]: DEBUG oslo_concurrency.lockutils [None req-bb8a9998-523f-47d2-bf66-5a22cf5a5cd5 tempest-FloatingIPsAssociationNegativeTestJSON-1605724567 tempest-FloatingIPsAssociationNegativeTestJSON-1605724567-project-member] Lock "d42d2ca9-ac93-4efb-92a0-de248221dd43" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.091822] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ddc4c3b4-4a83-4085-90b6-f0bdea2e69a9 tempest-ServerActionsV293TestJSON-1697370989 tempest-ServerActionsV293TestJSON-1697370989-project-member] Acquiring lock "db379674-cc77-430b-bd6d-2f674d57a7ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.091822] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ddc4c3b4-4a83-4085-90b6-f0bdea2e69a9 tempest-ServerActionsV293TestJSON-1697370989 tempest-ServerActionsV293TestJSON-1697370989-project-member] Lock "db379674-cc77-430b-bd6d-2f674d57a7ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.610779] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 719.635546] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 719.985262] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 719.985262] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 720.985469] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 720.985759] env[68040]: DEBUG nova.compute.manager [None 
req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 720.985802] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 721.016452] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 721.016662] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 721.016829] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 721.017009] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 721.017182] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 721.017350] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 721.017558] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 721.017672] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 721.017856] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 721.018018] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Skipping network cache update for instance because it is Building. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 721.018201] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 721.019148] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 721.019409] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 721.019604] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 721.019789] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 721.020276] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 721.020488] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 721.037330] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.037487] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.037690] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.037894] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 721.038962] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-44d4e987-2f0d-401c-a63c-e161de9a9b27 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.049509] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f342841c-d242-4b04-91c3-0475625056c5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.069435] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744968c3-e2c9-46ae-97a9-68d1921261c0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.081331] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ed3536-ee66-454c-b143-0f87cf95b6fc {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.116615] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181002MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 721.117052] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 721.117372] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.205125] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 7374c1a5-1b4f-4026-b885-bf0eb12a850e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 721.205125] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4cc61343-486f-466c-9881-1a6856c82748 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 721.205125] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8ae39d32-abb3-4e3e-8d2d-003eda60b136 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 721.205125] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f23e3529-19a6-4562-ae9b-591d1a452385 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 721.205284] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 721.205284] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3411cba3-71c9-4334-bc79-4e322f4231f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 721.205284] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 97b050ff-2997-4504-8787-04f1221251b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 721.205284] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b81d413c-2449-471a-b3d9-693fc0ab2824 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 721.205411] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 39de4e78-44cd-4582-998e-88ce6de2d51c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 721.205411] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance a89ff564-ea35-4000-8efa-2c1ec2b61759 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 721.217795] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 42f39352-e703-4ebf-9559-4c8b5abca70e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.253252] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d1819f29-a891-47dd-a456-8f3b127daf6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.269272] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1812f13e-b03d-48d4-940a-43974784265b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.282363] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance a7853da1-b00c-4b05-8f4a-f928fcb59cb2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.295190] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4e0f3617-aef9-4d66-8243-ff530b4084cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.309037] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 28fd3e76-1a9b-4273-b951-e50a8506a9bc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.322558] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 810b9f3c-7a92-40ab-8630-5c1ad6e4762c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.335508] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0a9103d6-2461-4ed3-93fa-a0149ccc5267 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.348668] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0a0d385d-3255-4755-8987-a26cd28006cb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.362843] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0cc0463e-1e55-4dd5-96b2-ee15025e689b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.385019] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance de1b8ef9-0088-4d2a-985e-d04fcff55d31 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.395160] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e2a9808e-6da7-4e53-a6d3-d3144ecf158a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.407832] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance ba480b6b-3d33-4f60-b045-21fe059fd0a6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.426281] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0b61102f-1b2e-4962-b94b-d27d394c5aef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.439904] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 856ec4dd-3a1d-4140-b3d5-52690cf87f92 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.453374] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance dae1bdde-e497-4ee6-9582-4988c5ae7a96 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.481334] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1b382d10-944a-4817-b959-c8ad2664309e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.500351] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 37f5c20e-dbc4-46a3-a83f-c7329f7a764c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.514028] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 25cb12ed-d0ed-402f-ba73-3c6c835adb17 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.524952] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d42d2ca9-ac93-4efb-92a0-de248221dd43 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.537797] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance db379674-cc77-430b-bd6d-2f674d57a7ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 721.537797] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 721.538028] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 722.029635] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7fc9fd7-4cc4-41b9-897e-a60904795193 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.038152] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2816ea1a-4f0e-4e59-95dd-30ef084dda60 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.070321] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151fb3d1-1f44-409c-ae7d-f2ab0f716705 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.087812] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6cdc3e4-58a5-4c71-88f8-8d1dbfe43d3d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.104040] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 722.120197] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 722.141837] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 722.142267] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.025s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.870937] env[68040]: DEBUG oslo_concurrency.lockutils [None 
req-4b75365a-fb09-4842-8956-e8a040cbcabf tempest-ServerRescueTestJSON-756786842 tempest-ServerRescueTestJSON-756786842-project-member] Acquiring lock "f17efcc0-2a35-4360-abdf-1543a4cd0fcc" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.871276] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4b75365a-fb09-4842-8956-e8a040cbcabf tempest-ServerRescueTestJSON-756786842 tempest-ServerRescueTestJSON-756786842-project-member] Lock "f17efcc0-2a35-4360-abdf-1543a4cd0fcc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.878776] env[68040]: WARNING oslo_vmware.rw_handles [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 747.878776] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 747.878776] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 747.878776] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 747.878776] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 747.878776] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 747.878776] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 747.878776] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 747.878776] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 747.878776] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 747.878776] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 747.878776] env[68040]: ERROR oslo_vmware.rw_handles [ 747.879348] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/3068b196-7b16-4d0a-a2a2-fcc526150658/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 747.880806] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 747.881091] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Copying Virtual Disk [datastore2] 
vmware_temp/3068b196-7b16-4d0a-a2a2-fcc526150658/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/3068b196-7b16-4d0a-a2a2-fcc526150658/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 747.881405] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a929fd98-0601-4cce-abb3-9db7e12df67a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.891512] env[68040]: DEBUG oslo_vmware.api [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Waiting for the task: (returnval){ [ 747.891512] env[68040]: value = "task-3200186" [ 747.891512] env[68040]: _type = "Task" [ 747.891512] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.900225] env[68040]: DEBUG oslo_vmware.api [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Task: {'id': task-3200186, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.404771] env[68040]: DEBUG oslo_vmware.exceptions [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 748.404771] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.404771] env[68040]: ERROR nova.compute.manager [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 748.404771] env[68040]: Faults: ['InvalidArgument'] [ 748.404771] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Traceback (most recent call last): [ 748.404771] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 748.404771] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] yield resources [ 748.404771] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 748.404771] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] self.driver.spawn(context, instance, image_meta, [ 748.405192] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in 
spawn [ 748.405192] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] self._vmops.spawn(context, instance, image_meta, injected_files, [ 748.405192] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 748.405192] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] self._fetch_image_if_missing(context, vi) [ 748.405192] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 748.405192] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] image_cache(vi, tmp_image_ds_loc) [ 748.405192] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 748.405192] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] vm_util.copy_virtual_disk( [ 748.405192] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 748.405192] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] session._wait_for_task(vmdk_copy_task) [ 748.405192] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 748.405192] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] return self.wait_for_task(task_ref) [ 748.405192] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 748.405636] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] return evt.wait() [ 748.405636] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 748.405636] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] result = hub.switch() [ 748.405636] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 748.405636] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] return self.greenlet.switch() [ 748.405636] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 748.405636] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] self.f(*self.args, **self.kw) [ 748.405636] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 748.405636] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] raise exceptions.translate_fault(task_info.error) [ 748.405636] env[68040]: ERROR nova.compute.manager [instance: 
8ae39d32-abb3-4e3e-8d2d-003eda60b136] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 748.405636] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Faults: ['InvalidArgument'] [ 748.405636] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] [ 748.406064] env[68040]: INFO nova.compute.manager [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Terminating instance [ 748.406328] env[68040]: DEBUG oslo_concurrency.lockutils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.406534] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 748.407079] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Acquiring lock "refresh_cache-8ae39d32-abb3-4e3e-8d2d-003eda60b136" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.407237] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Acquired lock "refresh_cache-8ae39d32-abb3-4e3e-8d2d-003eda60b136" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.407403] env[68040]: DEBUG nova.network.neutron [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 748.408329] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cece524f-11db-4e76-8714-2b4d4cefbaf8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.419072] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 748.419500] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 748.420275] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69488ffd-573f-4aeb-bada-c3ffb0eb8efc {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.426477] env[68040]: DEBUG oslo_vmware.api [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Waiting for the task: (returnval){ [ 748.426477] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52a43500-a39a-c875-e592-835f2389c71c" [ 748.426477] env[68040]: _type = "Task" [ 748.426477] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.434816] env[68040]: DEBUG oslo_vmware.api [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52a43500-a39a-c875-e592-835f2389c71c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.438460] env[68040]: DEBUG nova.network.neutron [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.533435] env[68040]: DEBUG nova.network.neutron [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.543990] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Releasing lock "refresh_cache-8ae39d32-abb3-4e3e-8d2d-003eda60b136" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.544681] env[68040]: DEBUG nova.compute.manager [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Start destroying the instance on the hypervisor. 
{{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 748.544961] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 748.547049] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4657c7a0-8a82-4197-9bbc-28f15b5b642a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.559107] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 748.559505] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-338c7a1f-b307-44e7-9795-3f91918deca7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.598675] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 748.599100] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 748.599376] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Deleting the datastore file [datastore2] 8ae39d32-abb3-4e3e-8d2d-003eda60b136 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 748.599821] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71398887-63b0-4c2a-bde4-1279db716d7b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.609985] env[68040]: DEBUG oslo_vmware.api [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Waiting for the task: (returnval){ [ 748.609985] env[68040]: value = "task-3200188" [ 748.609985] env[68040]: _type = "Task" [ 748.609985] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.619717] env[68040]: DEBUG oslo_vmware.api [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Task: {'id': task-3200188, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.940408] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 748.940756] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Creating directory with path [datastore2] vmware_temp/fa7f9f4b-aae2-4b6b-8b82-8113ee08cc37/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 748.940960] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72bc9da6-f2b4-4474-bf1d-16ca393d7f57 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.955102] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Created directory with path [datastore2] vmware_temp/fa7f9f4b-aae2-4b6b-8b82-8113ee08cc37/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 748.955352] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Fetch image to [datastore2] vmware_temp/fa7f9f4b-aae2-4b6b-8b82-8113ee08cc37/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 748.955497] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/fa7f9f4b-aae2-4b6b-8b82-8113ee08cc37/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 748.956358] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00762cea-2d3b-4039-9359-2abfd217741a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.964385] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e0d5c1-4090-403c-95eb-e0c069988a7b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.975194] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f188fba-89af-434d-ae1e-4cecf87f4478 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.010891] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
[ 749.018534] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8debb85b-8072-484a-939a-1bbb738bf49f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 749.044030] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 749.105622] env[68040]: DEBUG oslo_vmware.rw_handles [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fa7f9f4b-aae2-4b6b-8b82-8113ee08cc37/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 749.171282] env[68040]: DEBUG oslo_vmware.rw_handles [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 749.171557] env[68040]: DEBUG oslo_vmware.rw_handles [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/fa7f9f4b-aae2-4b6b-8b82-8113ee08cc37/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 749.176428] env[68040]: DEBUG oslo_vmware.api [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Task: {'id': task-3200188, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.043794} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
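[editor's note] The rw_handles records show the actual transfer: the generic service ticket acquired just above authorizes a raw HTTP write to the ESX host's /folder endpoint, and the image bytes are streamed straight onto the datastore. A hedged sketch of the same idea using requests (the real write handle manages its own connection and headers; the cookie name here is an assumption):

    import requests  # illustrative stand-in for oslo.vmware's write handle

    def upload_vmdk(url, ticket, image_iter):
        # Stream image chunks to the datastore "folder" URL; the service
        # ticket is presented as a cookie so the host accepts the write.
        resp = requests.put(url,
                            data=image_iter,  # any iterable of byte chunks
                            cookies={'vmware_cgi_ticket': ticket},  # assumed name
                            verify=False)
        resp.raise_for_status()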
[ 749.176736] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 749.177491] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 749.177491] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 749.177491] env[68040]: INFO nova.compute.manager [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Took 0.63 seconds to destroy the instance on the hypervisor.
[ 749.177770] env[68040]: DEBUG oslo.service.loopingcall [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 749.177873] env[68040]: DEBUG nova.compute.manager [-] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Skipping network deallocation for instance since networking was not requested. {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}}
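[editor's note] The oslo.service loopingcall record shows the manager wrapping network deallocation in a retrying loop and blocking until the wrapped function signals completion. A minimal sketch of that pattern with FixedIntervalLoopingCall (simplified; not the manager's actual retry policy):

    from oslo_service import loopingcall

    def deallocate_with_retries():
        attempts = {'n': 0}

        def _try_once():
            attempts['n'] += 1
            try:
                pass  # ... call the real deallocation here ...
            except Exception:
                if attempts['n'] >= 3:
                    raise       # give up; wait() below re-raises
                return          # let the timer call us again
            # Success: stop the loop and unblock wait().
            raise loopingcall.LoopingCallDone()

        timer = loopingcall.FixedIntervalLoopingCall(_try_once)
        timer.start(interval=1.0).wait()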
[ 749.180452] env[68040]: DEBUG nova.compute.claims [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 749.180586] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 749.180893] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 749.698842] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5cbe12a-8150-47d7-9694-0361102fc36e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 749.708648] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d74373f-38f1-4c6c-afd8-38ff890a7abc {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 749.739630] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee7d97e6-bd73-4576-ac39-aa2648dc1440 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 749.748424] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188813d7-c007-4376-bcd1-9cccc6b1233d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 749.764195] env[68040]: DEBUG nova.compute.provider_tree [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 749.773242] env[68040]: DEBUG nova.scheduler.client.report [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 749.793370] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.612s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
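[editor's note] The inventory dict in the report record above is what placement uses for capacity: for each resource class, usable capacity is (total - reserved) * allocation_ratio, consumed in min_unit/max_unit/step_size chunks. A quick check against the logged values:

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0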
[ 749.793960] env[68040]: ERROR nova.compute.manager [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 749.793960] env[68040]: Faults: ['InvalidArgument']
[ 749.793960] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Traceback (most recent call last):
[ 749.793960] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 749.793960] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] self.driver.spawn(context, instance, image_meta,
[ 749.793960] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 749.793960] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 749.793960] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 749.793960] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] self._fetch_image_if_missing(context, vi)
[ 749.793960] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 749.793960] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] image_cache(vi, tmp_image_ds_loc)
[ 749.793960] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 749.794242] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] vm_util.copy_virtual_disk(
[ 749.794242] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 749.794242] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] session._wait_for_task(vmdk_copy_task)
[ 749.794242] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 749.794242] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] return self.wait_for_task(task_ref)
[ 749.794242] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 749.794242] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] return evt.wait()
[ 749.794242] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 749.794242] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] result = hub.switch()
[ 749.794242] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 749.794242] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] return self.greenlet.switch()
[ 749.794242] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 749.794242] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] self.f(*self.args, **self.kw)
[ 749.794493] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 749.794493] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] raise exceptions.translate_fault(task_info.error)
[ 749.794493] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 749.794493] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Faults: ['InvalidArgument']
[ 749.794493] env[68040]: ERROR nova.compute.manager [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136]
[ 749.794687] env[68040]: DEBUG nova.compute.utils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 749.796725] env[68040]: DEBUG nova.compute.manager [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Build of instance 8ae39d32-abb3-4e3e-8d2d-003eda60b136 was re-scheduled: A specified parameter was not correct: fileType
[ 749.796725] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 749.797013] env[68040]: DEBUG nova.compute.manager [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 749.797287] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Acquiring lock "refresh_cache-8ae39d32-abb3-4e3e-8d2d-003eda60b136" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 749.797447] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Acquired lock "refresh_cache-8ae39d32-abb3-4e3e-8d2d-003eda60b136" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
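[editor's note] The traceback shows the failure path: _poll_task sees the CopyVirtualDisk_Task enter its error state and raises the fault translated by oslo_vmware.exceptions.translate_fault, surfacing here as a VimFaultException carrying the fault name 'InvalidArgument'. A sketch of how a caller can distinguish such vSphere faults (attribute names per oslo.vmware's exception class, to the best of my knowledge):

    from oslo_vmware import exceptions as vexc

    def run_task(session, task_ref):
        # session is an oslo_vmware.api.VMwareAPISession; wait_for_task()
        # re-raises task errors as translated exceptions.
        try:
            return session.wait_for_task(task_ref)
        except vexc.VimFaultException as e:
            # fault_list carries the vSphere fault names, e.g.
            # ['InvalidArgument'] for the fileType error above.
            if 'InvalidArgument' in (e.fault_list or []):
                raise  # malformed request: retrying the same call won't help
            raise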
"refresh_cache-8ae39d32-abb3-4e3e-8d2d-003eda60b136" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.797611] env[68040]: DEBUG nova.network.neutron [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 749.826514] env[68040]: DEBUG nova.network.neutron [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 749.904073] env[68040]: DEBUG nova.network.neutron [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.914470] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Releasing lock "refresh_cache-8ae39d32-abb3-4e3e-8d2d-003eda60b136" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.914855] env[68040]: DEBUG nova.compute.manager [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 749.915165] env[68040]: DEBUG nova.compute.manager [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] [instance: 8ae39d32-abb3-4e3e-8d2d-003eda60b136] Skipping network deallocation for instance since networking was not requested. {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 750.006414] env[68040]: INFO nova.scheduler.client.report [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Deleted allocations for instance 8ae39d32-abb3-4e3e-8d2d-003eda60b136 [ 750.030513] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aae7c671-dd8e-438c-bc9c-bc15430ebacc tempest-ServerShowV257Test-1660784702 tempest-ServerShowV257Test-1660784702-project-member] Lock "8ae39d32-abb3-4e3e-8d2d-003eda60b136" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.167s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.057349] env[68040]: DEBUG nova.compute.manager [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Starting instance... 
[ 750.109400] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 750.109685] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 750.111164] env[68040]: INFO nova.compute.claims [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 750.530743] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fcfebac-980d-4787-a55b-49f2c75ae43d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 750.539070] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243ac2cd-ccd0-48c8-b7f7-5e28ab6bae0a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 750.569485] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989ebd77-591f-4a39-ac20-aef47af287db {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 750.577215] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e347e6-0d98-4f57-befe-50a4fa0afb65 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 750.590638] env[68040]: DEBUG nova.compute.provider_tree [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 750.602541] env[68040]: DEBUG nova.scheduler.client.report [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 750.619293] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.509s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
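[editor's note] The acquire/release pairs with waited/held timings come from oslo.concurrency's lockutils, which the resource tracker uses to serialize claims on "compute_resources". The same pattern in application code (illustrative; a lock with the same name, not the tracker's own code):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim(instance_uuid, resources):
        # Body runs while holding the named in-process lock, so concurrent
        # claims serialize, producing waited/held records like the above.
        return {'instance': instance_uuid, 'resources': resources}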
[ 750.619776] env[68040]: DEBUG nova.compute.manager [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 750.656544] env[68040]: DEBUG nova.compute.utils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 750.658155] env[68040]: DEBUG nova.compute.manager [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Not allocating networking since 'none' was specified. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}}
[ 750.666718] env[68040]: DEBUG nova.compute.manager [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 750.729403] env[68040]: DEBUG nova.compute.manager [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Start spawning the instance on the hypervisor. {{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 750.756213] env[68040]: DEBUG nova.virt.hardware [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=<?>,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-27T05:59:34Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 750.756499] env[68040]: DEBUG nova.virt.hardware [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 750.756694] env[68040]: DEBUG nova.virt.hardware [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 750.756949] env[68040]: DEBUG nova.virt.hardware [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 750.757175] env[68040]: DEBUG nova.virt.hardware [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 750.757371] env[68040]: DEBUG nova.virt.hardware [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 750.757617] env[68040]: DEBUG nova.virt.hardware [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 750.757829] env[68040]: DEBUG nova.virt.hardware [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 750.758117] env[68040]: DEBUG nova.virt.hardware [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
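[editor's note] The hardware records walk the topology search: with no flavor or image limits set (0 means unset), caps of 65536 apply, and for 1 vCPU the only factorization is sockets=1, cores=1, threads=1. The enumeration is, in spirit, the following (a sketch, not Nova's exact algorithm):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Yield (sockets, cores, threads) triples whose product equals the
        # vCPU count, within the given caps -- matching the "Build
        # topologies for 1 vcpu(s)" records above.
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)]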
[ 750.758256] env[68040]: DEBUG nova.virt.hardware [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 750.758466] env[68040]: DEBUG nova.virt.hardware [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 750.759361] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb6d738-067c-48fd-a6a7-29c04c051033 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 750.770097] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503304a5-a986-4f53-a801-ad0efdd93a32 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 750.786741] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Instance VIF info [] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 750.792547] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Creating folder: Project (77497114fab540b39e8cbdef26e94981). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 750.792840] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4da5933a-5cf3-4775-aa56-a8c52c02846d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 750.804684] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Created folder: Project (77497114fab540b39e8cbdef26e94981) in parent group-v639956.
[ 750.804867] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Creating folder: Instances. Parent ref: group-v639992. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 750.805111] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-104c08c4-59d0-467a-a7d3-9224bbf8f164 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 750.814487] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Created folder: Instances in parent group-v639992.
[ 750.814806] env[68040]: DEBUG oslo.service.loopingcall [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 750.815023] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 750.815232] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da9759a3-1786-40d4-b97a-37ec49e4e0ce {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 750.832709] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 750.832709] env[68040]: value = "task-3200191"
[ 750.832709] env[68040]: _type = "Task"
[ 750.832709] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 750.840350] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200191, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 751.343421] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200191, 'name': CreateVM_Task, 'duration_secs': 0.279052} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 751.343652] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 751.344019] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 751.344221] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 751.344558] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 751.344874] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a9cedc2-b05a-409e-8b9f-b595ddd160b9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 751.351335] env[68040]: DEBUG oslo_vmware.api [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Waiting for the task: (returnval){
[ 751.351335] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52857494-d40c-9a01-6f33-37e689ce0841"
[ 751.351335] env[68040]: _type = "Task"
[ 751.351335] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 751.358705] env[68040]: DEBUG oslo_vmware.api [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52857494-d40c-9a01-6f33-37e689ce0841, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 751.862516] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 751.862761] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 751.862976] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 756.133783] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquiring lock "bce68a2b-260c-45cc-ac98-d4b01b4513a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 756.134095] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Lock "bce68a2b-260c-45cc-ac98-d4b01b4513a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 782.106790] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 782.107111] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 782.107171] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}}
[ 782.107298] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}}
[ 782.128414] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 782.128574] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 782.128706] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 782.128831] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 782.128955] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 782.129124] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 782.129271] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 782.129393] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 782.129510] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 782.129627] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 782.129748] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}
[ 782.130207] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 782.130377] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 782.130526] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 782.130670] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 782.130807] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 782.130950] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 782.131091] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
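[editor's note] Each "Running periodic task" record is oslo.service's periodic task runner iterating the decorated methods of the manager. The registration pattern looks roughly like this (a generic sketch, not Nova's actual manager class):

    from oslo_service import periodic_task

    class ExampleManager(periodic_task.PeriodicTasks):
        # Instantiated with a config object, e.g. ExampleManager(cfg.CONF).
        @periodic_task.periodic_task(spacing=60)
        def _poll_volume_usage(self, context):
            pass  # invoked by run_periodic_tasks() on each tick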
[ 782.131232] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 782.142834] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 782.143064] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 782.143251] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 782.143408] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 782.144470] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eccb93b-2e9c-43f4-bf90-807dfac4b2c0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 782.153406] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e773b7d1-e989-46d1-84fc-fede0e1d0e94 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 782.168906] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558fa62c-b70c-4762-ae2d-b430d7b5002c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 782.174800] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db72782f-5e65-496f-a8aa-a77df27177e9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 782.204492] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180970MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 782.204652] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
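[editor's note] The audit below walks every instance known to this host: ten are actively managed with 1 VCPU / 128 MB / 1 GB allocations each, while the rest are scheduled but not yet started, so their allocations are skipped. The final resource view the audit ends with follows from that arithmetic plus the 512 MB reserved value in the inventory records above (a consistency check, not the tracker's code):

    managed = 10                    # instances with placement allocations below
    per_instance = {'VCPU': 1, 'MEMORY_MB': 128, 'DISK_GB': 1}
    reserved_mb = 512               # from the MEMORY_MB inventory reported above

    used_ram = reserved_mb + managed * per_instance['MEMORY_MB']   # 1792 MB
    used_vcpus = managed * per_instance['VCPU']                    # 10
    used_disk = managed * per_instance['DISK_GB']                  # 10 GB
    print(used_ram, used_vcpus, used_disk)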
[ 782.204854] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 782.296664] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 7374c1a5-1b4f-4026-b885-bf0eb12a850e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 782.296832] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4cc61343-486f-466c-9881-1a6856c82748 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 782.296962] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f23e3529-19a6-4562-ae9b-591d1a452385 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 782.297105] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 782.297235] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3411cba3-71c9-4334-bc79-4e322f4231f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 782.297394] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 97b050ff-2997-4504-8787-04f1221251b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 782.297519] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b81d413c-2449-471a-b3d9-693fc0ab2824 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 782.297638] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 39de4e78-44cd-4582-998e-88ce6de2d51c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 782.297756] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance a89ff564-ea35-4000-8efa-2c1ec2b61759 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 782.297886] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 42f39352-e703-4ebf-9559-4c8b5abca70e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 782.308816] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d1819f29-a891-47dd-a456-8f3b127daf6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 782.318849] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1812f13e-b03d-48d4-940a-43974784265b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 782.328680] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance a7853da1-b00c-4b05-8f4a-f928fcb59cb2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 782.340168] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4e0f3617-aef9-4d66-8243-ff530b4084cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 782.350281] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 28fd3e76-1a9b-4273-b951-e50a8506a9bc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 782.360040] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 810b9f3c-7a92-40ab-8630-5c1ad6e4762c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 782.369783] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0a9103d6-2461-4ed3-93fa-a0149ccc5267 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 782.379975] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0a0d385d-3255-4755-8987-a26cd28006cb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 782.390630] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0cc0463e-1e55-4dd5-96b2-ee15025e689b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 782.400457] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance de1b8ef9-0088-4d2a-985e-d04fcff55d31 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 782.410084] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e2a9808e-6da7-4e53-a6d3-d3144ecf158a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 782.420125] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance ba480b6b-3d33-4f60-b045-21fe059fd0a6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 782.430171] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0b61102f-1b2e-4962-b94b-d27d394c5aef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 782.440136] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 856ec4dd-3a1d-4140-b3d5-52690cf87f92 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 782.451173] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance dae1bdde-e497-4ee6-9582-4988c5ae7a96 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 782.460741] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1b382d10-944a-4817-b959-c8ad2664309e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 782.470531] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 37f5c20e-dbc4-46a3-a83f-c7329f7a764c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 782.479755] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 25cb12ed-d0ed-402f-ba73-3c6c835adb17 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 782.490094] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d42d2ca9-ac93-4efb-92a0-de248221dd43 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 782.499359] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance db379674-cc77-430b-bd6d-2f674d57a7ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 782.509240] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f17efcc0-2a35-4360-abdf-1543a4cd0fcc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 782.518616] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance bce68a2b-260c-45cc-ac98-d4b01b4513a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 782.518859] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 782.519013] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 782.877025] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e91b4166-e1e9-47a5-91b1-af55c0f2daf2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.884332] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc733e0b-c814-4703-91ee-45f6e4f82934 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.913961] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b2c62e-1d84-40e6-aedd-2ab2b1249a7a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.921151] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8213f07-e7a7-47d0-9c1c-86d86fde1bde {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.934130] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 
22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 782.942090] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 782.957528] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 782.957760] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.753s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 798.971256] env[68040]: WARNING oslo_vmware.rw_handles [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 798.971256] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 798.971256] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 798.971256] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 798.971256] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 798.971256] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 798.971256] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 798.971256] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 798.971256] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 798.971256] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 798.971256] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 798.971256] env[68040]: ERROR oslo_vmware.rw_handles [ 798.971795] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/fa7f9f4b-aae2-4b6b-8b82-8113ee08cc37/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 798.973427] env[68040]: DEBUG 
nova.virt.vmwareapi.vmops [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 798.973711] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Copying Virtual Disk [datastore2] vmware_temp/fa7f9f4b-aae2-4b6b-8b82-8113ee08cc37/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/fa7f9f4b-aae2-4b6b-8b82-8113ee08cc37/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 798.974250] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a90a4242-e8cd-4e12-9ad7-d122dc96b6a1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.982090] env[68040]: DEBUG oslo_vmware.api [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Waiting for the task: (returnval){ [ 798.982090] env[68040]: value = "task-3200192" [ 798.982090] env[68040]: _type = "Task" [ 798.982090] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.990190] env[68040]: DEBUG oslo_vmware.api [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Task: {'id': task-3200192, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.492547] env[68040]: DEBUG oslo_vmware.exceptions [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Fault InvalidArgument not matched. 
{{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 799.492833] env[68040]: DEBUG oslo_concurrency.lockutils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 799.493411] env[68040]: ERROR nova.compute.manager [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 799.493411] env[68040]: Faults: ['InvalidArgument'] [ 799.493411] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Traceback (most recent call last): [ 799.493411] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 799.493411] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] yield resources [ 799.493411] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 799.493411] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] self.driver.spawn(context, instance, image_meta, [ 799.493411] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 799.493411] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 799.493411] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 799.493411] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] self._fetch_image_if_missing(context, vi) [ 799.493411] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 799.493793] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] image_cache(vi, tmp_image_ds_loc) [ 799.493793] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 799.493793] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] vm_util.copy_virtual_disk( [ 799.493793] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 799.493793] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] session._wait_for_task(vmdk_copy_task) [ 799.493793] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 799.493793] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] return self.wait_for_task(task_ref) [ 799.493793] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 799.493793] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] return evt.wait() [ 799.493793] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 799.493793] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] result = hub.switch() [ 799.493793] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 799.493793] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] return self.greenlet.switch() [ 799.494278] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 799.494278] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] self.f(*self.args, **self.kw) [ 799.494278] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 799.494278] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] raise exceptions.translate_fault(task_info.error) [ 799.494278] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 799.494278] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Faults: ['InvalidArgument'] [ 799.494278] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] [ 799.494278] env[68040]: INFO nova.compute.manager [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Terminating instance [ 799.495371] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.495582] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 799.496214] env[68040]: DEBUG nova.compute.manager [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 
tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 799.496408] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 799.496639] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ddaaaf33-7be9-44b7-a7a0-74a80dabee4f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.498956] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605eec54-ca28-4135-83c4-c38eece2f3a8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.505602] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 799.505817] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7757cc39-28db-42e7-be06-b30ee9c90522 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.508064] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 799.508283] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 799.509191] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-667d2dc3-651f-454e-b2db-7b808ea79a1b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.513687] env[68040]: DEBUG oslo_vmware.api [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Waiting for the task: (returnval){ [ 799.513687] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52c5b5a9-32bc-49cc-6de4-b96be344e414" [ 799.513687] env[68040]: _type = "Task" [ 799.513687] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.522176] env[68040]: DEBUG oslo_vmware.api [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52c5b5a9-32bc-49cc-6de4-b96be344e414, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.577176] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 799.577176] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 799.577176] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Deleting the datastore file [datastore2] 7374c1a5-1b4f-4026-b885-bf0eb12a850e {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 799.577176] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fda35847-736c-4091-8ab9-26ddf1d5bfab {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.585017] env[68040]: DEBUG oslo_vmware.api [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Waiting for the task: (returnval){ [ 799.585017] env[68040]: value = "task-3200194" [ 799.585017] env[68040]: _type = "Task" [ 799.585017] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.591358] env[68040]: DEBUG oslo_vmware.api [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Task: {'id': task-3200194, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.024195] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 800.024467] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Creating directory with path [datastore2] vmware_temp/48739043-37a9-4097-a7db-547390fda054/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 800.024697] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fdcfba5d-d06d-48da-9daf-168ea9e84223 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.036812] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Created directory with path [datastore2] vmware_temp/48739043-37a9-4097-a7db-547390fda054/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 800.037016] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Fetch image to [datastore2] vmware_temp/48739043-37a9-4097-a7db-547390fda054/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 800.037203] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/48739043-37a9-4097-a7db-547390fda054/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 800.037942] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-300d4c69-9096-4f8e-a0d9-bda29c40a884 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.044727] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b68fb68-5678-41c8-87a8-ad2175407f6c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.054092] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b3a54d-6b3e-4b02-a68e-2ac297cfa7d9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.083760] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ffde0051-41d9-48f0-a8da-937a5e7171a1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.095260] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0384664f-b0e1-4be4-8bf9-f9fd75d92ae0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.096958] env[68040]: DEBUG oslo_vmware.api [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Task: {'id': task-3200194, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078638} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.097208] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 800.097393] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 800.097568] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 800.097741] env[68040]: INFO nova.compute.manager [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Took 0.60 seconds to destroy the instance on the hypervisor. 
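The failed spawn above follows the task-polling pattern visible in the tracebacks: CopyVirtualDisk_Task is submitted, wait_for_task polls the vCenter task until it reaches a terminal state (the "progress is 0%" lines), and a task-level error is translated into a VimFaultException ("A specified parameter was not correct: fileType"), after which the instance is unregistered and its datastore files deleted. Below is a minimal illustrative sketch of that polling loop; TaskInfo and VimFaultException here are simplified stand-ins, not the real oslo.vmware classes.

    # Illustrative approximation of the wait_for_task/_poll_task loop seen in
    # the tracebacks above ("raise exceptions.translate_fault(task_info.error)").
    # TaskInfo and VimFaultException are simplified stand-ins, not the actual
    # oslo.vmware objects.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str               # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0
        error: str | None = None

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(poll, interval=0.5):
        """Poll a vCenter-style task until it finishes; raise on error."""
        while True:
            info = poll()
            if info.state == 'success':
                return info
            if info.state == 'error':
                # Task failed server-side: surface it as a fault, as the
                # traceback above does via translate_fault().
                raise VimFaultException(info.error)
            time.sleep(interval)

    # Reproduces the failure mode logged above: the copy task errors out and
    # the poller surfaces it as a fault during spawn.
    states = iter([
        TaskInfo(state='running', progress=0),
        TaskInfo(state='error',
                 error='A specified parameter was not correct: fileType'),
    ])
    try:
        wait_for_task(lambda: next(states), interval=0)
    except VimFaultException as fault:
        print('task failed:', fault)

In the log, the same loop then drives the cleanup task DeleteDatastoreFile_Task (task-3200194) to successful completion before the claim is aborted.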
[ 800.099799] env[68040]: DEBUG nova.compute.claims [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 800.099992] env[68040]: DEBUG oslo_concurrency.lockutils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.100228] env[68040]: DEBUG oslo_concurrency.lockutils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.117487] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 800.179747] env[68040]: DEBUG oslo_vmware.rw_handles [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/48739043-37a9-4097-a7db-547390fda054/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 800.241253] env[68040]: DEBUG oslo_vmware.rw_handles [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 800.241504] env[68040]: DEBUG oslo_vmware.rw_handles [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/48739043-37a9-4097-a7db-547390fda054/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 800.563178] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-520f3950-6aeb-486e-b884-103ecf005d68 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.571087] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-732948d0-bf60-4eca-8f72-90bd1447af75 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.600713] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb44a5b-07fb-4b30-a5e4-77000160f697 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.607771] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a708d8-1279-418b-bfea-bc8d9b091ab6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.620456] env[68040]: DEBUG nova.compute.provider_tree [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 800.629876] env[68040]: DEBUG nova.scheduler.client.report [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 800.643750] env[68040]: DEBUG oslo_concurrency.lockutils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.543s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.644292] env[68040]: ERROR nova.compute.manager [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 800.644292] env[68040]: Faults: ['InvalidArgument'] [ 800.644292] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Traceback (most recent call last): [ 800.644292] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 800.644292] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] self.driver.spawn(context, instance, image_meta, [ 800.644292] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 800.644292] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 800.644292] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 800.644292] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] self._fetch_image_if_missing(context, vi) [ 800.644292] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 800.644292] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] image_cache(vi, tmp_image_ds_loc) [ 800.644292] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 800.644650] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] vm_util.copy_virtual_disk( [ 800.644650] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 800.644650] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] session._wait_for_task(vmdk_copy_task) [ 800.644650] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 800.644650] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] return self.wait_for_task(task_ref) [ 800.644650] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 800.644650] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] return evt.wait() [ 800.644650] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 800.644650] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] result = hub.switch() [ 800.644650] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 800.644650] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] return self.greenlet.switch() [ 800.644650] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 800.644650] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] self.f(*self.args, **self.kw) [ 800.645047] env[68040]: ERROR nova.compute.manager [instance: 
7374c1a5-1b4f-4026-b885-bf0eb12a850e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 800.645047] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] raise exceptions.translate_fault(task_info.error) [ 800.645047] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 800.645047] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Faults: ['InvalidArgument'] [ 800.645047] env[68040]: ERROR nova.compute.manager [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] [ 800.645047] env[68040]: DEBUG nova.compute.utils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 800.646737] env[68040]: DEBUG nova.compute.manager [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Build of instance 7374c1a5-1b4f-4026-b885-bf0eb12a850e was re-scheduled: A specified parameter was not correct: fileType [ 800.646737] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 800.647115] env[68040]: DEBUG nova.compute.manager [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 800.647294] env[68040]: DEBUG nova.compute.manager [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 800.647535] env[68040]: DEBUG nova.compute.manager [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 800.647709] env[68040]: DEBUG nova.network.neutron [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 800.980470] env[68040]: DEBUG nova.network.neutron [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.991828] env[68040]: INFO nova.compute.manager [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] [instance: 7374c1a5-1b4f-4026-b885-bf0eb12a850e] Took 0.34 seconds to deallocate network for instance. [ 801.083114] env[68040]: INFO nova.scheduler.client.report [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Deleted allocations for instance 7374c1a5-1b4f-4026-b885-bf0eb12a850e [ 801.107917] env[68040]: DEBUG oslo_concurrency.lockutils [None req-abfb4df0-74ef-4eff-ad18-f7d107e5ac25 tempest-FloatingIPsAssociationTestJSON-43156565 tempest-FloatingIPsAssociationTestJSON-43156565-project-member] Lock "7374c1a5-1b4f-4026-b885-bf0eb12a850e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 154.053s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.121733] env[68040]: DEBUG nova.compute.manager [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Starting instance... 
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 801.170772] env[68040]: DEBUG oslo_concurrency.lockutils [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 801.171136] env[68040]: DEBUG oslo_concurrency.lockutils [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.172611] env[68040]: INFO nova.compute.claims [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 801.583530] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805eeb24-7fdb-4927-800a-0169989307ad {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.591022] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4345640-9381-48ab-9ce9-184b92c622b5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.621114] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ba66c1-5866-4059-a738-a1549f34b4cf {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.628184] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af125d4-102f-4bb2-afaf-748ac2508ccb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.641180] env[68040]: DEBUG nova.compute.provider_tree [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 801.649542] env[68040]: DEBUG nova.scheduler.client.report [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 801.664232] env[68040]: DEBUG oslo_concurrency.lockutils [None 
req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.493s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.664729] env[68040]: DEBUG nova.compute.manager [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 801.696478] env[68040]: DEBUG nova.compute.utils [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 801.697989] env[68040]: DEBUG nova.compute.manager [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 801.698138] env[68040]: DEBUG nova.network.neutron [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 801.706630] env[68040]: DEBUG nova.compute.manager [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 801.771752] env[68040]: DEBUG nova.compute.manager [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Start spawning the instance on the hypervisor. 
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 801.803075] env[68040]: DEBUG nova.virt.hardware [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 801.803370] env[68040]: DEBUG nova.virt.hardware [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 801.803533] env[68040]: DEBUG nova.virt.hardware [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 801.803719] env[68040]: DEBUG nova.virt.hardware [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 801.803868] env[68040]: DEBUG nova.virt.hardware [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 801.804107] env[68040]: DEBUG nova.virt.hardware [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 801.804272] env[68040]: DEBUG nova.virt.hardware [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 801.804394] env[68040]: DEBUG nova.virt.hardware [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 801.804564] env[68040]: DEBUG nova.virt.hardware [None 
req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 801.804727] env[68040]: DEBUG nova.virt.hardware [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 801.804900] env[68040]: DEBUG nova.virt.hardware [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 801.806089] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ee0dc6-733b-4808-98b9-0183b01d8ff1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.814712] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed9000e7-0c12-45e6-b593-a5469b408857 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.986921] env[68040]: DEBUG nova.policy [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ed28c031a1af4a719176cdfbe4006b58', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ef16815e17940d28aa5939012373ebd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 802.356415] env[68040]: DEBUG nova.network.neutron [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Successfully created port: 38ae6aa1-6285-41d7-a625-3da055447f17 {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 802.985748] env[68040]: DEBUG nova.network.neutron [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Successfully updated port: 38ae6aa1-6285-41d7-a625-3da055447f17 {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 803.005573] env[68040]: DEBUG oslo_concurrency.lockutils [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Acquiring lock "refresh_cache-d1819f29-a891-47dd-a456-8f3b127daf6f" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.005734] env[68040]: DEBUG oslo_concurrency.lockutils [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f 
tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Acquired lock "refresh_cache-d1819f29-a891-47dd-a456-8f3b127daf6f" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.005881] env[68040]: DEBUG nova.network.neutron [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 803.056981] env[68040]: DEBUG nova.network.neutron [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 803.231315] env[68040]: DEBUG nova.network.neutron [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Updating instance_info_cache with network_info: [{"id": "38ae6aa1-6285-41d7-a625-3da055447f17", "address": "fa:16:3e:62:2c:5c", "network": {"id": "ca1a84af-ab33-497c-8767-fd4463c076be", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.97", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0770d674a39c40089de0aade9440b370", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38ae6aa1-62", "ovs_interfaceid": "38ae6aa1-6285-41d7-a625-3da055447f17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.242798] env[68040]: DEBUG oslo_concurrency.lockutils [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Releasing lock "refresh_cache-d1819f29-a891-47dd-a456-8f3b127daf6f" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.243430] env[68040]: DEBUG nova.compute.manager [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Instance network_info: |[{"id": "38ae6aa1-6285-41d7-a625-3da055447f17", "address": "fa:16:3e:62:2c:5c", "network": {"id": "ca1a84af-ab33-497c-8767-fd4463c076be", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.97", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0770d674a39c40089de0aade9440b370", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38ae6aa1-62", "ovs_interfaceid": "38ae6aa1-6285-41d7-a625-3da055447f17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 803.243572] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:2c:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '38ae6aa1-6285-41d7-a625-3da055447f17', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 803.251841] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Creating folder: Project (4ef16815e17940d28aa5939012373ebd). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 803.252364] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5ce50e68-11f7-4de6-b0f8-bdf345bc5598 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.265425] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Created folder: Project (4ef16815e17940d28aa5939012373ebd) in parent group-v639956. [ 803.265607] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Creating folder: Instances. Parent ref: group-v639995. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 803.265817] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e94f3253-8d4b-4175-aa7f-195b95e3cf22 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.274467] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Created folder: Instances in parent group-v639995. 
[ 803.274688] env[68040]: DEBUG oslo.service.loopingcall [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 803.274864] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 803.275062] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-55f51aa9-6e7e-45b1-8f27-43eb957cdf05 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.294695] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 803.294695] env[68040]: value = "task-3200197" [ 803.294695] env[68040]: _type = "Task" [ 803.294695] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.304882] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200197, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.459224] env[68040]: DEBUG nova.compute.manager [req-7718a8db-eb32-406d-b604-1947be531b02 req-66e4daad-f08c-4925-9865-586357e1933d service nova] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Received event network-vif-plugged-38ae6aa1-6285-41d7-a625-3da055447f17 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 803.459455] env[68040]: DEBUG oslo_concurrency.lockutils [req-7718a8db-eb32-406d-b604-1947be531b02 req-66e4daad-f08c-4925-9865-586357e1933d service nova] Acquiring lock "d1819f29-a891-47dd-a456-8f3b127daf6f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.459881] env[68040]: DEBUG oslo_concurrency.lockutils [req-7718a8db-eb32-406d-b604-1947be531b02 req-66e4daad-f08c-4925-9865-586357e1933d service nova] Lock "d1819f29-a891-47dd-a456-8f3b127daf6f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.459881] env[68040]: DEBUG oslo_concurrency.lockutils [req-7718a8db-eb32-406d-b604-1947be531b02 req-66e4daad-f08c-4925-9865-586357e1933d service nova] Lock "d1819f29-a891-47dd-a456-8f3b127daf6f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.459881] env[68040]: DEBUG nova.compute.manager [req-7718a8db-eb32-406d-b604-1947be531b02 req-66e4daad-f08c-4925-9865-586357e1933d service nova] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] No waiting events found dispatching network-vif-plugged-38ae6aa1-6285-41d7-a625-3da055447f17 {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 803.460009] env[68040]: WARNING nova.compute.manager [req-7718a8db-eb32-406d-b604-1947be531b02 req-66e4daad-f08c-4925-9865-586357e1933d service nova] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Received unexpected 
event network-vif-plugged-38ae6aa1-6285-41d7-a625-3da055447f17 for instance with vm_state building and task_state spawning. [ 803.460169] env[68040]: DEBUG nova.compute.manager [req-7718a8db-eb32-406d-b604-1947be531b02 req-66e4daad-f08c-4925-9865-586357e1933d service nova] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Received event network-changed-38ae6aa1-6285-41d7-a625-3da055447f17 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 803.460324] env[68040]: DEBUG nova.compute.manager [req-7718a8db-eb32-406d-b604-1947be531b02 req-66e4daad-f08c-4925-9865-586357e1933d service nova] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Refreshing instance network info cache due to event network-changed-38ae6aa1-6285-41d7-a625-3da055447f17. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 803.460503] env[68040]: DEBUG oslo_concurrency.lockutils [req-7718a8db-eb32-406d-b604-1947be531b02 req-66e4daad-f08c-4925-9865-586357e1933d service nova] Acquiring lock "refresh_cache-d1819f29-a891-47dd-a456-8f3b127daf6f" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.460637] env[68040]: DEBUG oslo_concurrency.lockutils [req-7718a8db-eb32-406d-b604-1947be531b02 req-66e4daad-f08c-4925-9865-586357e1933d service nova] Acquired lock "refresh_cache-d1819f29-a891-47dd-a456-8f3b127daf6f" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.460791] env[68040]: DEBUG nova.network.neutron [req-7718a8db-eb32-406d-b604-1947be531b02 req-66e4daad-f08c-4925-9865-586357e1933d service nova] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Refreshing network info cache for port 38ae6aa1-6285-41d7-a625-3da055447f17 {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 803.741403] env[68040]: DEBUG nova.network.neutron [req-7718a8db-eb32-406d-b604-1947be531b02 req-66e4daad-f08c-4925-9865-586357e1933d service nova] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Updated VIF entry in instance network info cache for port 38ae6aa1-6285-41d7-a625-3da055447f17. 
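The "-events" lock, the pop with no waiter, and the "Received unexpected event" warning above describe a latch pattern: Neutron's callback can land before the spawning thread registers interest. A minimal sketch of that pattern, with illustrative names rather than Nova's internals:

import threading

class InstanceEvents:
    """Latches keyed by (instance_uuid, event_key), popped under a lock."""

    def __init__(self):
        self._lock = threading.Lock()
        self._events = {}

    def prepare(self, uuid, key):
        # Called by the spawning thread before it blocks on the latch.
        with self._lock:
            return self._events.setdefault((uuid, key), threading.Event())

    def pop(self, uuid, key):
        # Called when the external event arrives from Neutron.
        with self._lock:
            return self._events.pop((uuid, key), None)

events = InstanceEvents()
latch = events.pop('d1819f29-a891-47dd-a456-8f3b127daf6f',
                   'network-vif-plugged-38ae6aa1-6285-41d7-a625-3da055447f17')
if latch is None:
    # Matches the WARNING above: the event beat the waiter, so it is
    # dropped as "unexpected" while the instance is still building.
    pass
else:
    latch.set()  # wakes whoever is blocked in wait()
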
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 803.741403] env[68040]: DEBUG nova.network.neutron [req-7718a8db-eb32-406d-b604-1947be531b02 req-66e4daad-f08c-4925-9865-586357e1933d service nova] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Updating instance_info_cache with network_info: [{"id": "38ae6aa1-6285-41d7-a625-3da055447f17", "address": "fa:16:3e:62:2c:5c", "network": {"id": "ca1a84af-ab33-497c-8767-fd4463c076be", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.97", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0770d674a39c40089de0aade9440b370", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38ae6aa1-62", "ovs_interfaceid": "38ae6aa1-6285-41d7-a625-3da055447f17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.752315] env[68040]: DEBUG oslo_concurrency.lockutils [req-7718a8db-eb32-406d-b604-1947be531b02 req-66e4daad-f08c-4925-9865-586357e1933d service nova] Releasing lock "refresh_cache-d1819f29-a891-47dd-a456-8f3b127daf6f" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.804156] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200197, 'name': CreateVM_Task, 'duration_secs': 0.29691} completed successfully. 
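The "Waiting for the task: (returnval)" block and the progress records above come from oslo.vmware's task poller. A stripped-down equivalent, assuming `session` is a VMwareAPISession; in practice `session.wait_for_task(task_ref)` does this with a looping call and fault translation:

import time

from oslo_vmware import vim_util

def wait_for_task(session, task_ref, interval=0.5):
    # Poll the TaskInfo property until the task reaches a terminal state,
    # the same loop that logs "progress is N%" above.
    while True:
        info = session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, task_ref, 'info')
        if info.state == 'success':
            return info.result
        if info.state == 'error':
            raise RuntimeError(info.error.localizedMessage)
        time.sleep(interval)
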
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.804709] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 803.805497] env[68040]: DEBUG oslo_concurrency.lockutils [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.805815] env[68040]: DEBUG oslo_concurrency.lockutils [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.806241] env[68040]: DEBUG oslo_concurrency.lockutils [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 803.806590] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7049d422-23ff-4b14-a888-5b02b9c5001d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.811721] env[68040]: DEBUG oslo_vmware.api [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Waiting for the task: (returnval){ [ 803.811721] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52d42e82-f4f2-d3be-1a28-5521de037f19" [ 803.811721] env[68040]: _type = "Task" [ 803.811721] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.820556] env[68040]: DEBUG oslo_vmware.api [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52d42e82-f4f2-d3be-1a28-5521de037f19, 'name': SearchDatastore_Task} progress is 0%. 
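The image-cache lock names above embed the datastore path, so every request that needs the same cached image serializes on one lock. A sketch using the same oslo.concurrency primitive these records are emitted from:

from oslo_concurrency import lockutils

IMAGE_ID = '8c308313-03d5-40b6-a5fe-9037e32dc76e'  # from the records above
LOCK = '[datastore2] devstack-image-cache_base/%s' % IMAGE_ID

with lockutils.lock(LOCK):
    # Only one request per cached image runs this section at a time; the
    # fetch-or-reuse of the cached VMDK would happen here (sketch only).
    pass
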
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.322266] env[68040]: DEBUG oslo_concurrency.lockutils [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.322596] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 804.322845] env[68040]: DEBUG oslo_concurrency.lockutils [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.296230] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "1e43f6be-f6a3-4569-adea-c82a5d709247" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.296588] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "1e43f6be-f6a3-4569-adea-c82a5d709247" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 842.830483] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 842.852193] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 842.852353] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 842.852673] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 842.852849] env[68040]: DEBUG oslo_service.periodic_task [None 
req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 842.852997] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 842.983949] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 842.984216] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 842.984335] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 842.984460] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 843.005710] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 843.005872] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 843.006015] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 843.006155] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 843.006280] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 843.006406] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Skipping network cache update for instance because it is Building. 
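The run of "Skipping network cache update" records above is the heal task filtering its candidate list: anything still building is left alone. A hedged reconstruction of that filter; `instances` is any iterable of objects with vm_state/uuid attributes, not Nova's actual types:

def heal_candidates(instances):
    # vm_state value per nova's vm_states module ('building').
    picked = []
    for inst in instances:
        if inst.vm_state == 'building':
            print('[instance: %s] Skipping network cache update for '
                  'instance because it is Building.' % inst.uuid)
            continue
        picked.append(inst)
    return picked  # empty here, hence "Didn't find any instances..." below
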
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 843.006523] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 843.006641] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 843.006980] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 843.006980] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 843.006980] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 843.007499] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 843.007642] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... 
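The periodic tasks named above are oslo.service-driven methods, and _reclaim_queued_deletes adds its own config guard, which is why it logs a skip. A minimal sketch, assuming a conf object exposing reclaim_instance_interval; the spacing value is illustrative:

from oslo_service import periodic_task

class ComputeManagerSketch(periodic_task.PeriodicTasks):
    def __init__(self, conf):
        super().__init__(conf)
        self.conf = conf

    @periodic_task.periodic_task(spacing=60)
    def _reclaim_queued_deletes(self, context):
        # Mirrors "CONF.reclaim_instance_interval <= 0, skipping..." above:
        # reclaiming soft-deleted instances is off unless the interval is
        # set to a positive number of seconds.
        if self.conf.reclaim_instance_interval <= 0:
            return
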
{{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 843.007816] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 843.021534] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.021775] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.021915] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.022078] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 843.023126] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236703f4-c73c-4f1f-a102-4893b70740ba {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.031545] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73edc2b-4cb1-4ad4-801c-2453be19343d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.045296] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e555e8d-be4c-4563-ad6a-65f030480d21 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.051523] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b6d154-5273-4944-955b-058bce8c05f8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.079907] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180979MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 843.080077] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.080247] 
env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.152358] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4cc61343-486f-466c-9881-1a6856c82748 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 843.152519] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f23e3529-19a6-4562-ae9b-591d1a452385 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 843.152674] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 843.152818] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3411cba3-71c9-4334-bc79-4e322f4231f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 843.152943] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 97b050ff-2997-4504-8787-04f1221251b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 843.153077] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b81d413c-2449-471a-b3d9-693fc0ab2824 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 843.153201] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 39de4e78-44cd-4582-998e-88ce6de2d51c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 843.153321] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance a89ff564-ea35-4000-8efa-2c1ec2b61759 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 843.153439] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 42f39352-e703-4ebf-9559-4c8b5abca70e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 843.153730] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d1819f29-a891-47dd-a456-8f3b127daf6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 843.165295] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1812f13e-b03d-48d4-940a-43974784265b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.175364] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance a7853da1-b00c-4b05-8f4a-f928fcb59cb2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.187248] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4e0f3617-aef9-4d66-8243-ff530b4084cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.197179] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 28fd3e76-1a9b-4273-b951-e50a8506a9bc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.206953] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 810b9f3c-7a92-40ab-8630-5c1ad6e4762c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.216761] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0a9103d6-2461-4ed3-93fa-a0149ccc5267 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.226748] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0a0d385d-3255-4755-8987-a26cd28006cb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.236554] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0cc0463e-1e55-4dd5-96b2-ee15025e689b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.245467] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance de1b8ef9-0088-4d2a-985e-d04fcff55d31 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.254740] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e2a9808e-6da7-4e53-a6d3-d3144ecf158a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.264300] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance ba480b6b-3d33-4f60-b045-21fe059fd0a6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.273908] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0b61102f-1b2e-4962-b94b-d27d394c5aef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.284235] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 856ec4dd-3a1d-4140-b3d5-52690cf87f92 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.294030] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance dae1bdde-e497-4ee6-9582-4988c5ae7a96 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.304746] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1b382d10-944a-4817-b959-c8ad2664309e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.313967] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 37f5c20e-dbc4-46a3-a83f-c7329f7a764c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.324211] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 25cb12ed-d0ed-402f-ba73-3c6c835adb17 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.334284] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d42d2ca9-ac93-4efb-92a0-de248221dd43 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.344928] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance db379674-cc77-430b-bd6d-2f674d57a7ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
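As a consistency check, the usage totals reported just below can be reproduced from the records above: ten actively managed instances at 128 MB RAM, 1 GB disk and 1 vCPU each, plus the host's 512 MB memory reservation; the scheduled-but-not-started instances are skipped and count for nothing yet:

used_ram = 512 + 10 * 128   # MB -> 1792, matching "used_ram=1792MB"
used_disk = 10 * 1          # GB -> 10,   matching "used_disk=10GB"
used_vcpus = 10 * 1         #    -> 10,   matching "used_vcpus=10"
assert (used_ram, used_disk, used_vcpus) == (1792, 10, 10)
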
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.355188] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f17efcc0-2a35-4360-abdf-1543a4cd0fcc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.364715] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance bce68a2b-260c-45cc-ac98-d4b01b4513a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.374536] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1e43f6be-f6a3-4569-adea-c82a5d709247 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 843.374536] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 843.374536] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 843.741042] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246bfdf4-c3ba-40ce-80ac-ab66bf628ca5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.748535] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560dd562-ff14-4ad4-a04a-081deae8cd50 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.777268] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e781ad11-8a29-4b81-9301-829ea42d5359 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.784041] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a23d668d-4163-47b6-99ee-087e38610773 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.797370] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 
22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.805517] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 843.818688] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 843.818871] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.739s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.908656] env[68040]: WARNING oslo_vmware.rw_handles [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 847.908656] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 847.908656] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 847.908656] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 847.908656] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 847.908656] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 847.908656] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 847.908656] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 847.908656] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 847.908656] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 847.908656] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 847.908656] env[68040]: ERROR oslo_vmware.rw_handles [ 847.909220] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/48739043-37a9-4097-a7db-547390fda054/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 847.910868] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None 
req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 847.911200] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Copying Virtual Disk [datastore2] vmware_temp/48739043-37a9-4097-a7db-547390fda054/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/48739043-37a9-4097-a7db-547390fda054/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 847.911442] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f65206e6-7827-47f3-ac1b-7b3144f2f629 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.919983] env[68040]: DEBUG oslo_vmware.api [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Waiting for the task: (returnval){ [ 847.919983] env[68040]: value = "task-3200198" [ 847.919983] env[68040]: _type = "Task" [ 847.919983] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.928791] env[68040]: DEBUG oslo_vmware.api [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Task: {'id': task-3200198, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.429701] env[68040]: DEBUG oslo_vmware.exceptions [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Fault InvalidArgument not matched. 
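"Fault InvalidArgument not matched" above means oslo.vmware's get_fault_class found no dedicated exception class for that fault, so the failed CopyVirtualDisk_Task surfaces as a generic VimFaultException carrying the raw fault names. A sketch of inspecting it at the call site; the wrapper function and its argument are illustrative:

from oslo_vmware import exceptions as vexc

def run_copy(copy_task_fn):
    try:
        copy_task_fn()
    except vexc.VimFaultException as exc:
        # fault_list holds the vSphere fault names; the spawn failure below
        # reports exactly ['InvalidArgument'] with the fileType detail.
        if 'InvalidArgument' in exc.fault_list:
            # ...log or special-case the bad copy spec here (sketch)...
            pass
        raise
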
{{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 848.429853] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.430368] env[68040]: ERROR nova.compute.manager [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 848.430368] env[68040]: Faults: ['InvalidArgument'] [ 848.430368] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] Traceback (most recent call last): [ 848.430368] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 848.430368] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] yield resources [ 848.430368] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 848.430368] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] self.driver.spawn(context, instance, image_meta, [ 848.430368] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 848.430368] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] self._vmops.spawn(context, instance, image_meta, injected_files, [ 848.430368] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 848.430368] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] self._fetch_image_if_missing(context, vi) [ 848.430368] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 848.430711] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] image_cache(vi, tmp_image_ds_loc) [ 848.430711] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 848.430711] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] vm_util.copy_virtual_disk( [ 848.430711] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 848.430711] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] session._wait_for_task(vmdk_copy_task) [ 848.430711] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 848.430711] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] return self.wait_for_task(task_ref) [ 848.430711] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 848.430711] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] return evt.wait() [ 848.430711] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 848.430711] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] result = hub.switch() [ 848.430711] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 848.430711] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] return self.greenlet.switch() [ 848.431070] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 848.431070] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] self.f(*self.args, **self.kw) [ 848.431070] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 848.431070] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] raise exceptions.translate_fault(task_info.error) [ 848.431070] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 848.431070] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] Faults: ['InvalidArgument'] [ 848.431070] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] [ 848.431070] env[68040]: INFO nova.compute.manager [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Terminating instance [ 848.432941] env[68040]: DEBUG nova.compute.manager [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Start destroying the instance on the hypervisor. 
[ 848.433153] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 848.433464] env[68040]: DEBUG oslo_concurrency.lockutils [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 848.433629] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 848.434338] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d485d26-db8d-4305-84f0-77734274cc4e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 848.436797] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c70846a-be6f-46a1-968c-c03f07162396 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 848.442218] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 848.442418] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a2d3958f-1232-4d7b-b284-cdacc91b75dc {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 848.444640] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 848.444813] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 848.445472] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e88df12-eb1c-47cc-a425-80bb55050350 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 848.449862] env[68040]: DEBUG oslo_vmware.api [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Waiting for the task: (returnval){
[ 848.449862] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52b7d03f-57de-5276-ee08-0c9aae819bce"
[ 848.449862] env[68040]: _type = "Task"
[ 848.449862] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 848.456965] env[68040]: DEBUG oslo_vmware.api [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52b7d03f-57de-5276-ee08-0c9aae819bce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 848.512070] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 848.512189] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 848.512339] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Deleting the datastore file [datastore2] 4cc61343-486f-466c-9881-1a6856c82748 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 848.512603] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31c7e751-bf1c-4aa8-b168-1057b637dc9d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 848.519069] env[68040]: DEBUG oslo_vmware.api [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Waiting for the task: (returnval){
[ 848.519069] env[68040]: value = "task-3200200"
[ 848.519069] env[68040]: _type = "Task"
[ 848.519069] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 848.526776] env[68040]: DEBUG oslo_vmware.api [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Task: {'id': task-3200200, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
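Two requests interleave here without stepping on each other: the image-cache path on datastore2 is guarded by a named oslo.concurrency lock (one request released it above, another acquired it), and MakeDirectory on the cache folder is treated as idempotent, like mkdir -p. A small sketch of that pattern using the public lockutils context manager and the lock name from the log; the helper callables are invented:

    from oslo_concurrency import lockutils

    CACHE_VMDK = ("[datastore2] devstack-image-cache_base/"
                  "8c308313-03d5-40b6-a5fe-9037e32dc76e/"
                  "8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk")

    def populate_or_reuse_cache(fetch_image, cache_exists):
        # Only one greenthread at a time may touch this cache entry; losers
        # of the race simply find the image already present once they get
        # the lock, so the fetch happens at most once per image.
        with lockutils.lock(CACHE_VMDK):
            if not cache_exists():
                fetch_image()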
[ 848.960714] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 848.961119] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Creating directory with path [datastore2] vmware_temp/0ce520ab-dde7-49a8-b8c4-31d751902048/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 848.961235] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce49ef7b-a02c-4348-bc84-bf074d6d3287 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 848.973024] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Created directory with path [datastore2] vmware_temp/0ce520ab-dde7-49a8-b8c4-31d751902048/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 848.973221] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Fetch image to [datastore2] vmware_temp/0ce520ab-dde7-49a8-b8c4-31d751902048/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 848.973401] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/0ce520ab-dde7-49a8-b8c4-31d751902048/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 848.974152] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfabef7f-c9a9-4ef9-8eb5-3bca193d4366 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 848.982155] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53497d9f-70ec-415d-913b-58a556190a8b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 848.991909] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e038720-5201-4c6b-acda-52a3b6255e49 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 849.026592] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d9ec648-3d65-40cb-87a9-7d5062af92e5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 849.033511] env[68040]: DEBUG oslo_vmware.api [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Task: {'id': task-3200200, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065078} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 849.034951] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 849.035169] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 849.035372] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 849.035552] env[68040]: INFO nova.compute.manager [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Took 0.60 seconds to destroy the instance on the hypervisor.
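The fetch above is deliberately two-step: the image is downloaded into a unique vmware_temp/<uuid>/ directory first and only promoted into devstack-image-cache_base once complete, so a crashed download never leaves a half-written file in the cache. A sketch of that decision flow with invented helper names:

    import uuid

    def fetch_image_if_missing(cache_exists, download, promote, image_id):
        if cache_exists(image_id):
            return "cache hit"
        # A unique temp dir per request avoids collisions between racing fetches.
        tmp = f"[datastore2] vmware_temp/{uuid.uuid4()}/{image_id}/tmp-sparse.vmdk"
        download(image_id, tmp)     # HTTP write through oslo.vmware rw_handles
        promote(tmp, image_id)      # e.g. the CopyVirtualDisk that failed earlier
        return "fetched"

    print(fetch_image_if_missing(lambda i: False, lambda i, t: None,
                                 lambda t, i: None,
                                 "8c308313-03d5-40b6-a5fe-9037e32dc76e"))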
[ 849.037671] env[68040]: DEBUG nova.compute.claims [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 849.037863] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 849.038102] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 849.040675] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c7a11c36-60b3-4f8d-ab6d-19e1370aadf1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 849.056953] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ed6d90f-46c7-4c92-8100-4f87f8be1f33 tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Acquiring lock "4cc61343-486f-466c-9881-1a6856c82748" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 849.063982] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 849.120273] env[68040]: DEBUG oslo_vmware.rw_handles [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0ce520ab-dde7-49a8-b8c4-31d751902048/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 849.181204] env[68040]: DEBUG oslo_vmware.rw_handles [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 849.181301] env[68040]: DEBUG oslo_vmware.rw_handles [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0ce520ab-dde7-49a8-b8c4-31d751902048/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 849.499322] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-934d4b2a-09e4-4116-acfb-f4131f8d4b2e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 849.506633] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-842bf84b-356d-41ac-a524-24a98f4b58f3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 849.539553] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ffad7cf-f716-4672-8927-b44ec4d55f5c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 849.546829] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700da24d-a202-4ea8-a5ea-defccc09ec73 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 849.560242] env[68040]: DEBUG nova.compute.provider_tree [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 849.570109] env[68040]: DEBUG nova.scheduler.client.report [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 849.583280] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.545s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
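The inventory dict in the report above is what determines schedulable capacity. Placement sizes each resource class as (total - reserved) * allocation_ratio, so this node over-commits CPU 4x but not memory or disk; the arithmetic as plain Python:

    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0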
[ 849.583823] env[68040]: ERROR nova.compute.manager [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 849.583823] env[68040]: Faults: ['InvalidArgument']
[ 849.583823] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] Traceback (most recent call last):
[ 849.583823] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 849.583823] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]     self.driver.spawn(context, instance, image_meta,
[ 849.583823] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 849.583823] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 849.583823] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 849.583823] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]     self._fetch_image_if_missing(context, vi)
[ 849.583823] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 849.583823] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]     image_cache(vi, tmp_image_ds_loc)
[ 849.583823] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 849.584186] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]     vm_util.copy_virtual_disk(
[ 849.584186] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 849.584186] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]     session._wait_for_task(vmdk_copy_task)
[ 849.584186] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 849.584186] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]     return self.wait_for_task(task_ref)
[ 849.584186] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 849.584186] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]     return evt.wait()
[ 849.584186] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 849.584186] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]     result = hub.switch()
[ 849.584186] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 849.584186] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]     return self.greenlet.switch()
[ 849.584186] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 849.584186] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]     self.f(*self.args, **self.kw)
[ 849.584721] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 849.584721] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]     raise exceptions.translate_fault(task_info.error)
[ 849.584721] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 849.584721] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748] Faults: ['InvalidArgument']
[ 849.584721] env[68040]: ERROR nova.compute.manager [instance: 4cc61343-486f-466c-9881-1a6856c82748]
[ 849.584721] env[68040]: DEBUG nova.compute.utils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 849.586169] env[68040]: DEBUG nova.compute.manager [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Build of instance 4cc61343-486f-466c-9881-1a6856c82748 was re-scheduled: A specified parameter was not correct: fileType
[ 849.586169] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 849.586562] env[68040]: DEBUG nova.compute.manager [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 849.586755] env[68040]: DEBUG nova.compute.manager [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
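What follows from "was re-scheduled" is the standard give-up-locally path: the compute manager records the fault, skips VIF unplugging (the driver does not implement it), deallocates networking, and returns the request to the scheduler for another attempt. A schematic of that control flow; the exception name matches Nova's, the function bodies are illustrative only:

    class RescheduledException(Exception):
        """Raised internally when a build should be retried elsewhere."""

    def do_build_and_run_instance(build, cleanup_networks, resched):
        try:
            build()                  # driver.spawn -> VimFaultException here
        except RescheduledException:
            cleanup_networks()       # 'Unplugging VIFs' / 'Deallocating network'
            resched()                # hand back to the scheduler for a retry

    def failing_build():
        raise RescheduledException("A specified parameter was not correct: fileType")

    do_build_and_run_instance(failing_build,
                              lambda: print("networks deallocated"),
                              lambda: print("rescheduled"))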
[ 849.586916] env[68040]: DEBUG nova.compute.manager [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 849.587164] env[68040]: DEBUG nova.network.neutron [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 849.659112] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c6b25696-e488-4b01-a26c-da2b3bff00b2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Acquiring lock "f23e3529-19a6-4562-ae9b-591d1a452385" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 849.940655] env[68040]: DEBUG nova.network.neutron [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 849.951698] env[68040]: INFO nova.compute.manager [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Took 0.36 seconds to deallocate network for instance.
[ 850.062758] env[68040]: INFO nova.scheduler.client.report [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Deleted allocations for instance 4cc61343-486f-466c-9881-1a6856c82748
[ 850.091102] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c45ad43f-da7b-48ca-8428-31b8ff3dc11b tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Lock "4cc61343-486f-466c-9881-1a6856c82748" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.696s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 850.092357] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ed6d90f-46c7-4c92-8100-4f87f8be1f33 tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Lock "4cc61343-486f-466c-9881-1a6856c82748" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.037s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 850.092589] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ed6d90f-46c7-4c92-8100-4f87f8be1f33 tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Acquiring lock "4cc61343-486f-466c-9881-1a6856c82748-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 850.092799] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ed6d90f-46c7-4c92-8100-4f87f8be1f33 tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Lock "4cc61343-486f-466c-9881-1a6856c82748-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 850.092975] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ed6d90f-46c7-4c92-8100-4f87f8be1f33 tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Lock "4cc61343-486f-466c-9881-1a6856c82748-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 850.095552] env[68040]: INFO nova.compute.manager [None req-6ed6d90f-46c7-4c92-8100-4f87f8be1f33 tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Terminating instance
[ 850.097294] env[68040]: DEBUG nova.compute.manager [None req-6ed6d90f-46c7-4c92-8100-4f87f8be1f33 tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 850.097486] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-6ed6d90f-46c7-4c92-8100-4f87f8be1f33 tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 850.097991] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bdb70793-5a99-4ea7-8e4e-ec58355641b0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 850.102735] env[68040]: DEBUG nova.compute.manager [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 850.109902] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8562ec5d-8062-4a54-8e01-9da179d1c8ed {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 850.141168] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-6ed6d90f-46c7-4c92-8100-4f87f8be1f33 tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4cc61343-486f-466c-9881-1a6856c82748 could not be found.
[ 850.141388] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-6ed6d90f-46c7-4c92-8100-4f87f8be1f33 tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 850.141574] env[68040]: INFO nova.compute.manager [None req-6ed6d90f-46c7-4c92-8100-4f87f8be1f33 tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 850.141835] env[68040]: DEBUG oslo.service.loopingcall [None req-6ed6d90f-46c7-4c92-8100-4f87f8be1f33 tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 850.142097] env[68040]: DEBUG nova.compute.manager [-] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 850.142195] env[68040]: DEBUG nova.network.neutron [-] [instance: 4cc61343-486f-466c-9881-1a6856c82748] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 850.164326] env[68040]: DEBUG oslo_concurrency.lockutils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 850.164579] env[68040]: DEBUG oslo_concurrency.lockutils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 850.166804] env[68040]: INFO nova.compute.claims [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 850.176977] env[68040]: DEBUG nova.network.neutron [-] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 850.188966] env[68040]: INFO nova.compute.manager [-] [instance: 4cc61343-486f-466c-9881-1a6856c82748] Took 0.05 seconds to deallocate network for instance.
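The claim lines above follow a lock-guarded reserve/abort pattern: resources are reserved under the "compute_resources" lock when a build starts, and handed back (the earlier "Aborting claim") when it fails. A minimal sketch, assuming a simple dict of free resources rather than Nova's ResourceTracker:

    import threading

    _lock = threading.Lock()          # stands in for the "compute_resources" lock
    free = {"VCPU": 192, "MEMORY_MB": 196078}

    class Claim:
        def __init__(self, want):
            self.want = want
            with _lock:               # 'Lock "compute_resources" acquired ...'
                for rc, amount in want.items():
                    if free[rc] < amount:
                        raise RuntimeError(f"not enough {rc}")
                    free[rc] -= amount

        def abort(self):
            # On a failed build the claim is returned (the 'Aborting claim' line).
            with _lock:
                for rc, amount in self.want.items():
                    free[rc] += amount

    claim = Claim({"VCPU": 1, "MEMORY_MB": 128})   # 'Claim successful on node ...'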
[ 850.309081] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ed6d90f-46c7-4c92-8100-4f87f8be1f33 tempest-ServerExternalEventsTest-1947779233 tempest-ServerExternalEventsTest-1947779233-project-member] Lock "4cc61343-486f-466c-9881-1a6856c82748" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.216s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 850.616488] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618124ca-2586-441a-8281-9e9a386b8a99 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 850.625155] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a353123a-00ad-4675-a116-af32d464c266 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 850.656329] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253fb345-bfd2-4a0e-b5df-7d1ff2659fdd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 850.663820] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e49f999-36e8-4a51-8d48-433b340b38e6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 850.677907] env[68040]: DEBUG nova.compute.provider_tree [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 850.687100] env[68040]: DEBUG nova.scheduler.client.report [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 850.700291] env[68040]: DEBUG oslo_concurrency.lockutils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.536s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 850.700771] env[68040]: DEBUG nova.compute.manager [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 850.737596] env[68040]: DEBUG nova.compute.utils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 850.738789] env[68040]: DEBUG nova.compute.manager [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 850.738964] env[68040]: DEBUG nova.network.neutron [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 850.750459] env[68040]: DEBUG nova.compute.manager [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 850.797825] env[68040]: DEBUG nova.policy [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '94706b24a18f40279edb1efca20798b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62e4f0e2f21a4996a7aa409aab702cec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}}
[ 850.815386] env[68040]: DEBUG nova.compute.manager [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Start spawning the instance on the hypervisor. {{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
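The policy line above is an authorization probe, not an error: Nova asks whether this tenant may attach to external networks, and a failed check simply filters those networks out of the candidates. A minimal oslo.policy sketch with a rule name taken from the log; the "role:admin" default is an assumption for illustration:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault("network:attach_external_network", "role:admin"))

    creds = {"roles": ["reader", "member"],
             "project_id": "62e4f0e2f21a4996a7aa409aab702cec"}
    # Returns False rather than raising, so callers can degrade gracefully.
    print(enforcer.enforce("network:attach_external_network", {}, creds))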
[ 850.842087] env[68040]: DEBUG nova.virt.hardware [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=<?>,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-27T05:59:34Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 850.842364] env[68040]: DEBUG nova.virt.hardware [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 850.842525] env[68040]: DEBUG nova.virt.hardware [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 850.842711] env[68040]: DEBUG nova.virt.hardware [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 850.842860] env[68040]: DEBUG nova.virt.hardware [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 850.843034] env[68040]: DEBUG nova.virt.hardware [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 850.843237] env[68040]: DEBUG nova.virt.hardware [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 850.843414] env[68040]: DEBUG nova.virt.hardware [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 850.843607] env[68040]: DEBUG nova.virt.hardware [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 850.843791] env[68040]: DEBUG nova.virt.hardware [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 850.843965] env[68040]: DEBUG nova.virt.hardware [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 850.844827] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a92cddf2-2c7d-47aa-8b61-8bc6a5c16896 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 850.854663] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb183c52-651a-4b6a-b89b-d04ef5281d02 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 851.198370] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43459e79-3a4f-4b5f-828c-06661848124b tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Acquiring lock "17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 851.389815] env[68040]: DEBUG nova.network.neutron [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Successfully created port: 05ac884e-4f2b-4030-9704-16d0d2f12f28 {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 852.119872] env[68040]: DEBUG nova.compute.manager [req-52f9e1cf-0288-4aae-b91c-9ad9b5e1c3ee req-c31cf5e2-bd19-4ad5-98c7-c1c4f27bd1d5 service nova] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Received event network-vif-plugged-05ac884e-4f2b-4030-9704-16d0d2f12f28 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 852.120356] env[68040]: DEBUG oslo_concurrency.lockutils [req-52f9e1cf-0288-4aae-b91c-9ad9b5e1c3ee req-c31cf5e2-bd19-4ad5-98c7-c1c4f27bd1d5 service nova] Acquiring lock "1812f13e-b03d-48d4-940a-43974784265b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 852.120591] env[68040]: DEBUG oslo_concurrency.lockutils [req-52f9e1cf-0288-4aae-b91c-9ad9b5e1c3ee req-c31cf5e2-bd19-4ad5-98c7-c1c4f27bd1d5 service nova] Lock "1812f13e-b03d-48d4-940a-43974784265b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
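The topology lines above reduce to a factorization problem: with vcpus=1 and no flavor or image limits (the 65536s are the unbounded defaults), the only sockets x cores x threads product equal to 1 is 1x1x1. A simplified enumeration, not Nova's actual algorithm:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Yield every (sockets, cores, threads) whose product is exactly vcpus
        # and which respects the per-dimension limits.
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield (s, c, t)

    print(list(possible_topologies(1)))   # [(1, 1, 1)] -- 'Got 1 possible topologies'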
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.120800] env[68040]: DEBUG oslo_concurrency.lockutils [req-52f9e1cf-0288-4aae-b91c-9ad9b5e1c3ee req-c31cf5e2-bd19-4ad5-98c7-c1c4f27bd1d5 service nova] Lock "1812f13e-b03d-48d4-940a-43974784265b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.120984] env[68040]: DEBUG nova.compute.manager [req-52f9e1cf-0288-4aae-b91c-9ad9b5e1c3ee req-c31cf5e2-bd19-4ad5-98c7-c1c4f27bd1d5 service nova] [instance: 1812f13e-b03d-48d4-940a-43974784265b] No waiting events found dispatching network-vif-plugged-05ac884e-4f2b-4030-9704-16d0d2f12f28 {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 852.121170] env[68040]: WARNING nova.compute.manager [req-52f9e1cf-0288-4aae-b91c-9ad9b5e1c3ee req-c31cf5e2-bd19-4ad5-98c7-c1c4f27bd1d5 service nova] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Received unexpected event network-vif-plugged-05ac884e-4f2b-4030-9704-16d0d2f12f28 for instance with vm_state building and task_state spawning. [ 852.171814] env[68040]: DEBUG nova.network.neutron [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Successfully updated port: 05ac884e-4f2b-4030-9704-16d0d2f12f28 {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 852.188249] env[68040]: DEBUG oslo_concurrency.lockutils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Acquiring lock "refresh_cache-1812f13e-b03d-48d4-940a-43974784265b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.188396] env[68040]: DEBUG oslo_concurrency.lockutils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Acquired lock "refresh_cache-1812f13e-b03d-48d4-940a-43974784265b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.188543] env[68040]: DEBUG nova.network.neutron [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 852.232933] env[68040]: DEBUG nova.network.neutron [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Instance cache missing network info. 
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 852.415039] env[68040]: DEBUG nova.network.neutron [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Updating instance_info_cache with network_info: [{"id": "05ac884e-4f2b-4030-9704-16d0d2f12f28", "address": "fa:16:3e:e4:13:b9", "network": {"id": "5644a87e-3e5e-4ef0-b840-6886b5fb5b56", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-287057253-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e4f0e2f21a4996a7aa409aab702cec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32faf59b-014c-4f1f-8331-40df95bf741f", "external-id": "nsx-vlan-transportzone-996", "segmentation_id": 996, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05ac884e-4f", "ovs_interfaceid": "05ac884e-4f2b-4030-9704-16d0d2f12f28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.429744] env[68040]: DEBUG oslo_concurrency.lockutils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Releasing lock "refresh_cache-1812f13e-b03d-48d4-940a-43974784265b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.431164] env[68040]: DEBUG nova.compute.manager [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Instance network_info: |[{"id": "05ac884e-4f2b-4030-9704-16d0d2f12f28", "address": "fa:16:3e:e4:13:b9", "network": {"id": "5644a87e-3e5e-4ef0-b840-6886b5fb5b56", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-287057253-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e4f0e2f21a4996a7aa409aab702cec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32faf59b-014c-4f1f-8331-40df95bf741f", "external-id": "nsx-vlan-transportzone-996", "segmentation_id": 996, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05ac884e-4f", "ovs_interfaceid": "05ac884e-4f2b-4030-9704-16d0d2f12f28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
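The cached network_info entry above is plain JSON once extracted, and the 'Instance VIF info' line that follows is derived from a handful of its fields. A sketch of pulling out what the VMware VIF path needs, trimmed to the relevant keys:

    import json

    entry = json.loads("""{
      "id": "05ac884e-4f2b-4030-9704-16d0d2f12f28",
      "address": "fa:16:3e:e4:13:b9",
      "type": "ovs",
      "details": {"nsx-logical-switch-id": "32faf59b-014c-4f1f-8331-40df95bf741f"},
      "devname": "tap05ac884e-4f"
    }""")

    vif_info = {
        "mac_address": entry["address"],
        "iface_id": entry["id"],
        # For NSX-backed ports the opaque network id comes from the port details.
        "network_ref": {"type": "OpaqueNetwork",
                        "network-id": entry["details"]["nsx-logical-switch-id"],
                        "network-type": "nsx.LogicalSwitch"},
        "vif_model": "vmxnet3",
    }
    print(vif_info["network_ref"]["network-id"])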
{{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 852.431341] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:13:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32faf59b-014c-4f1f-8331-40df95bf741f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '05ac884e-4f2b-4030-9704-16d0d2f12f28', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 852.438155] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Creating folder: Project (62e4f0e2f21a4996a7aa409aab702cec). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 852.438684] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d0a599f-1c82-4c61-8f8a-6df7fb879921 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.450138] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Created folder: Project (62e4f0e2f21a4996a7aa409aab702cec) in parent group-v639956. [ 852.450337] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Creating folder: Instances. Parent ref: group-v639998. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 852.450560] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d69e296b-b983-4393-8328-1a864296fde8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.459238] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Created folder: Instances in parent group-v639998. [ 852.459480] env[68040]: DEBUG oslo.service.loopingcall [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 852.459664] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 852.459859] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ce84df3-25d3-417e-8539-a59711bc408c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.480775] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 852.480775] env[68040]: value = "task-3200203" [ 852.480775] env[68040]: _type = "Task" [ 852.480775] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.488094] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200203, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.971769] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Acquiring lock "3738de32-79cd-4b04-8081-cc1146730c75" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.971992] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Lock "3738de32-79cd-4b04-8081-cc1146730c75" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.990811] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200203, 'name': CreateVM_Task, 'duration_secs': 0.276119} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.991080] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 852.991740] env[68040]: DEBUG oslo_concurrency.lockutils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.991910] env[68040]: DEBUG oslo_concurrency.lockutils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.992243] env[68040]: DEBUG oslo_concurrency.lockutils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 852.992490] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69bdf803-19de-4a2e-9477-1648c2fd1468 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.997706] env[68040]: DEBUG oslo_vmware.api [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Waiting for the task: (returnval){ [ 852.997706] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52ef479e-db21-c246-6ce3-366999ad8316" [ 852.997706] env[68040]: _type = "Task" [ 852.997706] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.005238] env[68040]: DEBUG oslo_vmware.api [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52ef479e-db21-c246-6ce3-366999ad8316, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.508281] env[68040]: DEBUG oslo_concurrency.lockutils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.508607] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 853.508747] env[68040]: DEBUG oslo_concurrency.lockutils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.179706] env[68040]: DEBUG nova.compute.manager [req-2e84a127-4d66-478f-a9db-dfce9ca130fe req-f0e0d390-d16d-4b88-ae5f-f4d73b7fe47f service nova] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Received event network-changed-05ac884e-4f2b-4030-9704-16d0d2f12f28 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 854.179907] env[68040]: DEBUG nova.compute.manager [req-2e84a127-4d66-478f-a9db-dfce9ca130fe req-f0e0d390-d16d-4b88-ae5f-f4d73b7fe47f service nova] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Refreshing instance network info cache due to event network-changed-05ac884e-4f2b-4030-9704-16d0d2f12f28. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 854.180196] env[68040]: DEBUG oslo_concurrency.lockutils [req-2e84a127-4d66-478f-a9db-dfce9ca130fe req-f0e0d390-d16d-4b88-ae5f-f4d73b7fe47f service nova] Acquiring lock "refresh_cache-1812f13e-b03d-48d4-940a-43974784265b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.180363] env[68040]: DEBUG oslo_concurrency.lockutils [req-2e84a127-4d66-478f-a9db-dfce9ca130fe req-f0e0d390-d16d-4b88-ae5f-f4d73b7fe47f service nova] Acquired lock "refresh_cache-1812f13e-b03d-48d4-940a-43974784265b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.180429] env[68040]: DEBUG nova.network.neutron [req-2e84a127-4d66-478f-a9db-dfce9ca130fe req-f0e0d390-d16d-4b88-ae5f-f4d73b7fe47f service nova] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Refreshing network info cache for port 05ac884e-4f2b-4030-9704-16d0d2f12f28 {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 854.454477] env[68040]: DEBUG nova.network.neutron [req-2e84a127-4d66-478f-a9db-dfce9ca130fe req-f0e0d390-d16d-4b88-ae5f-f4d73b7fe47f service nova] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Updated VIF entry in instance network info cache for port 05ac884e-4f2b-4030-9704-16d0d2f12f28. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 854.454841] env[68040]: DEBUG nova.network.neutron [req-2e84a127-4d66-478f-a9db-dfce9ca130fe req-f0e0d390-d16d-4b88-ae5f-f4d73b7fe47f service nova] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Updating instance_info_cache with network_info: [{"id": "05ac884e-4f2b-4030-9704-16d0d2f12f28", "address": "fa:16:3e:e4:13:b9", "network": {"id": "5644a87e-3e5e-4ef0-b840-6886b5fb5b56", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-287057253-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "62e4f0e2f21a4996a7aa409aab702cec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32faf59b-014c-4f1f-8331-40df95bf741f", "external-id": "nsx-vlan-transportzone-996", "segmentation_id": 996, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05ac884e-4f", "ovs_interfaceid": "05ac884e-4f2b-4030-9704-16d0d2f12f28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.466713] env[68040]: DEBUG oslo_concurrency.lockutils [req-2e84a127-4d66-478f-a9db-dfce9ca130fe req-f0e0d390-d16d-4b88-ae5f-f4d73b7fe47f service nova] Releasing lock "refresh_cache-1812f13e-b03d-48d4-940a-43974784265b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 857.802458] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e3dd839b-a190-4cc9-b60e-3a463a3a1bba tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquiring lock "97b050ff-2997-4504-8787-04f1221251b8" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.242951] env[68040]: DEBUG oslo_concurrency.lockutils [None req-61027daf-3ee3-4eb9-9bdd-3295b1dbcef6 tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Acquiring lock "b81d413c-2449-471a-b3d9-693fc0ab2824" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.286804] env[68040]: DEBUG oslo_concurrency.lockutils [None req-19e2ad11-bcf8-4b05-9b63-5a427215fc5c tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Acquiring lock "39de4e78-44cd-4582-998e-88ce6de2d51c" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.576436] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d36c7935-42dc-4ea6-b86f-4f1a3e27c6d1 tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Acquiring lock "a89ff564-ea35-4000-8efa-2c1ec2b61759" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.159812] env[68040]: DEBUG oslo_concurrency.lockutils [None req-bfba6510-551e-457e-b3e6-7b336ec2b617 tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Acquiring lock "1812f13e-b03d-48d4-940a-43974784265b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.437325] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ff56fe6d-fde1-47e9-8aed-c0a208592e0a tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Acquiring lock "42f39352-e703-4ebf-9559-4c8b5abca70e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.596155] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f55598b6-2567-4a59-8df0-9afc2b7284d5 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Acquiring lock "d1819f29-a891-47dd-a456-8f3b127daf6f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.628197] env[68040]: DEBUG oslo_concurrency.lockutils [None req-483c24ac-b097-4ee1-a0b8-1614fd992d11 tempest-ServersAaction247Test-113467702 tempest-ServersAaction247Test-113467702-project-member] Acquiring lock "b9c0cbae-d76a-4ec9-9cc8-727d011dc5f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.629742] env[68040]: DEBUG oslo_concurrency.lockutils [None req-483c24ac-b097-4ee1-a0b8-1614fd992d11 tempest-ServersAaction247Test-113467702 tempest-ServersAaction247Test-113467702-project-member] Lock "b9c0cbae-d76a-4ec9-9cc8-727d011dc5f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 881.844231] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3e4dd475-92a0-4f4a-a305-e554b2311927 tempest-AttachInterfacesTestJSON-1449330779 tempest-AttachInterfacesTestJSON-1449330779-project-member] Acquiring lock "a51e5779-42bf-4281-8c46-1dcc771382ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 881.844475] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3e4dd475-92a0-4f4a-a305-e554b2311927 tempest-AttachInterfacesTestJSON-1449330779 tempest-AttachInterfacesTestJSON-1449330779-project-member] Lock "a51e5779-42bf-4281-8c46-1dcc771382ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.875357] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aeafd005-940e-4864-ba05-ef16240ef2ee 
tempest-VolumesAdminNegativeTest-1864683811 tempest-VolumesAdminNegativeTest-1864683811-project-member] Acquiring lock "f740b2a2-a3f5-45d7-913f-5455236a2620" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.875705] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aeafd005-940e-4864-ba05-ef16240ef2ee tempest-VolumesAdminNegativeTest-1864683811 tempest-VolumesAdminNegativeTest-1864683811-project-member] Lock "f740b2a2-a3f5-45d7-913f-5455236a2620" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.599350] env[68040]: WARNING oslo_vmware.rw_handles [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 894.599350] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 894.599350] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 894.599350] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 894.599350] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 894.599350] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 894.599350] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 894.599350] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 894.599350] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 894.599350] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 894.599350] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 894.599350] env[68040]: ERROR oslo_vmware.rw_handles [ 894.599783] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/0ce520ab-dde7-49a8-b8c4-31d751902048/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 894.601687] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 894.601949] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Copying Virtual Disk [datastore2] 
vmware_temp/0ce520ab-dde7-49a8-b8c4-31d751902048/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/0ce520ab-dde7-49a8-b8c4-31d751902048/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 894.602530] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b98376b3-7563-4ef7-97d7-b02ea75c54c5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.610653] env[68040]: DEBUG oslo_vmware.api [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Waiting for the task: (returnval){ [ 894.610653] env[68040]: value = "task-3200204" [ 894.610653] env[68040]: _type = "Task" [ 894.610653] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.618639] env[68040]: DEBUG oslo_vmware.api [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Task: {'id': task-3200204, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.121355] env[68040]: DEBUG oslo_vmware.exceptions [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 895.121703] env[68040]: DEBUG oslo_concurrency.lockutils [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.122299] env[68040]: ERROR nova.compute.manager [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 895.122299] env[68040]: Faults: ['InvalidArgument'] [ 895.122299] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Traceback (most recent call last): [ 895.122299] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 895.122299] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] yield resources [ 895.122299] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 895.122299] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] self.driver.spawn(context, instance, image_meta, [ 895.122299] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File 
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 895.122299] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] self._vmops.spawn(context, instance, image_meta, injected_files, [ 895.122299] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 895.122299] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] self._fetch_image_if_missing(context, vi) [ 895.122299] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 895.122667] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] image_cache(vi, tmp_image_ds_loc) [ 895.122667] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 895.122667] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] vm_util.copy_virtual_disk( [ 895.122667] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 895.122667] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] session._wait_for_task(vmdk_copy_task) [ 895.122667] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 895.122667] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] return self.wait_for_task(task_ref) [ 895.122667] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 895.122667] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] return evt.wait() [ 895.122667] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 895.122667] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] result = hub.switch() [ 895.122667] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 895.122667] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] return self.greenlet.switch() [ 895.123011] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 895.123011] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] self.f(*self.args, **self.kw) [ 895.123011] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 895.123011] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] raise exceptions.translate_fault(task_info.error) [ 895.123011] 
env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 895.123011] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Faults: ['InvalidArgument'] [ 895.123011] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] [ 895.123011] env[68040]: INFO nova.compute.manager [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Terminating instance [ 895.124621] env[68040]: DEBUG oslo_concurrency.lockutils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.124898] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 895.125617] env[68040]: DEBUG nova.compute.manager [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 895.125878] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 895.126156] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8400f9f8-105f-49a5-9536-bfb7ab0ec875 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.128581] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-610e4541-7bc6-48ef-9dcf-cab09e811ed9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.137785] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 895.137785] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1a5342c-c3dc-40c3-9a8f-599f46b1c6e4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.138528] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Created 
directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 895.138756] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 895.139466] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-799a0357-40b1-4df1-8576-077400ff7488 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.144862] env[68040]: DEBUG oslo_vmware.api [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Waiting for the task: (returnval){ [ 895.144862] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5277c7ba-7832-5807-199a-5a5e82dbec8e" [ 895.144862] env[68040]: _type = "Task" [ 895.144862] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.156099] env[68040]: DEBUG oslo_vmware.api [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5277c7ba-7832-5807-199a-5a5e82dbec8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.210025] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 895.210025] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 895.210025] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Deleting the datastore file [datastore2] f23e3529-19a6-4562-ae9b-591d1a452385 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 895.210025] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dfdc4c78-acef-4ac9-816f-3d286c94ec23 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.215677] env[68040]: DEBUG oslo_vmware.api [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Waiting for the task: (returnval){ [ 895.215677] env[68040]: value = "task-3200206" [ 895.215677] env[68040]: _type = "Task" [ 895.215677] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.225502] env[68040]: DEBUG oslo_vmware.api [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Task: {'id': task-3200206, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.657568] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 895.657980] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Creating directory with path [datastore2] vmware_temp/bc9294db-a802-4464-90dc-1828d31ef73e/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 895.658324] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccf7ead8-fcde-43f7-a0e7-40f6385ec759 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.679191] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Created directory with path [datastore2] vmware_temp/bc9294db-a802-4464-90dc-1828d31ef73e/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 895.679416] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Fetch image to [datastore2] vmware_temp/bc9294db-a802-4464-90dc-1828d31ef73e/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 895.679589] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/bc9294db-a802-4464-90dc-1828d31ef73e/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 895.680432] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3cbd849-3cd0-4255-807c-b768fe79a7ee {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.689295] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e24ed5b-2b0b-460f-884d-4b55ab308662 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.699386] env[68040]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed985b4-48b6-450b-ba4a-ebc7f87cee6e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.732227] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140d0c9f-da43-48c7-bae4-98ffc68d1532 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.739365] env[68040]: DEBUG oslo_vmware.api [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Task: {'id': task-3200206, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07619} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.741452] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 895.741452] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 895.741635] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 895.741832] env[68040]: INFO nova.compute.manager [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 895.744385] env[68040]: DEBUG nova.compute.claims [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 895.744596] env[68040]: DEBUG oslo_concurrency.lockutils [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 895.744796] env[68040]: DEBUG oslo_concurrency.lockutils [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 895.747312] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e7037879-ea3f-4d98-8998-a594f61ceb76 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.772139] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 895.855016] env[68040]: DEBUG oslo_vmware.rw_handles [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bc9294db-a802-4464-90dc-1828d31ef73e/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 895.915080] env[68040]: DEBUG oslo_vmware.rw_handles [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 895.915281] env[68040]: DEBUG oslo_vmware.rw_handles [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bc9294db-a802-4464-90dc-1828d31ef73e/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 896.249898] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a61a6a7-8f6d-4a61-bfe3-ded54238d930 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.257996] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2cb148a-db8f-4c9f-b333-c18957ddbf4d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.291058] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f6c58f-11ec-4c3b-93eb-dda7633d8ae6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.298810] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ca887d-c7e5-4818-9e4b-24762dd2fb53 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.313141] env[68040]: DEBUG nova.compute.provider_tree [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 896.343289] env[68040]: DEBUG nova.scheduler.client.report [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 896.361448] env[68040]: DEBUG oslo_concurrency.lockutils [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.616s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.361975] env[68040]: ERROR nova.compute.manager [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 896.361975] env[68040]: Faults: ['InvalidArgument'] [ 896.361975] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Traceback (most recent call last): [ 896.361975] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 896.361975] env[68040]: ERROR 
nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] self.driver.spawn(context, instance, image_meta, [ 896.361975] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 896.361975] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] self._vmops.spawn(context, instance, image_meta, injected_files, [ 896.361975] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 896.361975] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] self._fetch_image_if_missing(context, vi) [ 896.361975] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 896.361975] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] image_cache(vi, tmp_image_ds_loc) [ 896.361975] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 896.362495] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] vm_util.copy_virtual_disk( [ 896.362495] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 896.362495] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] session._wait_for_task(vmdk_copy_task) [ 896.362495] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 896.362495] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] return self.wait_for_task(task_ref) [ 896.362495] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 896.362495] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] return evt.wait() [ 896.362495] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 896.362495] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] result = hub.switch() [ 896.362495] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 896.362495] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] return self.greenlet.switch() [ 896.362495] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 896.362495] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] self.f(*self.args, **self.kw) [ 896.362819] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 896.362819] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] raise exceptions.translate_fault(task_info.error) [ 896.362819] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 896.362819] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Faults: ['InvalidArgument'] [ 896.362819] env[68040]: ERROR nova.compute.manager [instance: f23e3529-19a6-4562-ae9b-591d1a452385] [ 896.362936] env[68040]: DEBUG nova.compute.utils [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 896.365071] env[68040]: DEBUG nova.compute.manager [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Build of instance f23e3529-19a6-4562-ae9b-591d1a452385 was re-scheduled: A specified parameter was not correct: fileType [ 896.365071] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 896.365463] env[68040]: DEBUG nova.compute.manager [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 896.365641] env[68040]: DEBUG nova.compute.manager [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 896.365841] env[68040]: DEBUG nova.compute.manager [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 896.366045] env[68040]: DEBUG nova.network.neutron [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 896.708324] env[68040]: DEBUG nova.network.neutron [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.721702] env[68040]: INFO nova.compute.manager [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Took 0.36 seconds to deallocate network for instance. [ 896.823040] env[68040]: INFO nova.scheduler.client.report [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Deleted allocations for instance f23e3529-19a6-4562-ae9b-591d1a452385 [ 896.845207] env[68040]: DEBUG oslo_concurrency.lockutils [None req-85306673-a000-4a0d-b0f3-4ea3563344f2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Lock "f23e3529-19a6-4562-ae9b-591d1a452385" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 246.799s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.847684] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c6b25696-e488-4b01-a26c-da2b3bff00b2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Lock "f23e3529-19a6-4562-ae9b-591d1a452385" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 47.188s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.847894] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c6b25696-e488-4b01-a26c-da2b3bff00b2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Acquiring lock "f23e3529-19a6-4562-ae9b-591d1a452385-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.848132] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c6b25696-e488-4b01-a26c-da2b3bff00b2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Lock "f23e3529-19a6-4562-ae9b-591d1a452385-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 
0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 896.848493] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c6b25696-e488-4b01-a26c-da2b3bff00b2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Lock "f23e3529-19a6-4562-ae9b-591d1a452385-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.850632] env[68040]: INFO nova.compute.manager [None req-c6b25696-e488-4b01-a26c-da2b3bff00b2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Terminating instance [ 896.852258] env[68040]: DEBUG nova.compute.manager [None req-c6b25696-e488-4b01-a26c-da2b3bff00b2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 896.852478] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b25696-e488-4b01-a26c-da2b3bff00b2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 896.852944] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2e6e846e-a735-4801-954c-e1e742c8ad26 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.856962] env[68040]: DEBUG nova.compute.manager [None req-a5908c10-87e2-4461-8947-c87989df5100 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: a7853da1-b00c-4b05-8f4a-f928fcb59cb2] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 896.863202] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a83bc11-5272-473a-86ac-d2b71fbdb947 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.883625] env[68040]: DEBUG nova.compute.manager [None req-a5908c10-87e2-4461-8947-c87989df5100 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: a7853da1-b00c-4b05-8f4a-f928fcb59cb2] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 896.893637] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-c6b25696-e488-4b01-a26c-da2b3bff00b2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f23e3529-19a6-4562-ae9b-591d1a452385 could not be found. 
[ 896.893868] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b25696-e488-4b01-a26c-da2b3bff00b2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 896.894080] env[68040]: INFO nova.compute.manager [None req-c6b25696-e488-4b01-a26c-da2b3bff00b2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Took 0.04 seconds to destroy the instance on the hypervisor. [ 896.894328] env[68040]: DEBUG oslo.service.loopingcall [None req-c6b25696-e488-4b01-a26c-da2b3bff00b2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 896.894743] env[68040]: DEBUG nova.compute.manager [-] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 896.894842] env[68040]: DEBUG nova.network.neutron [-] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 896.908631] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a5908c10-87e2-4461-8947-c87989df5100 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Lock "a7853da1-b00c-4b05-8f4a-f928fcb59cb2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.732s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.918341] env[68040]: DEBUG nova.network.neutron [-] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.919770] env[68040]: DEBUG nova.compute.manager [None req-9f583b64-9d44-4375-801a-6272c17e1723 tempest-TenantUsagesTestJSON-4403897 tempest-TenantUsagesTestJSON-4403897-project-member] [instance: 4e0f3617-aef9-4d66-8243-ff530b4084cd] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 896.927905] env[68040]: INFO nova.compute.manager [-] [instance: f23e3529-19a6-4562-ae9b-591d1a452385] Took 0.03 seconds to deallocate network for instance. [ 896.952992] env[68040]: DEBUG nova.compute.manager [None req-9f583b64-9d44-4375-801a-6272c17e1723 tempest-TenantUsagesTestJSON-4403897 tempest-TenantUsagesTestJSON-4403897-project-member] [instance: 4e0f3617-aef9-4d66-8243-ff530b4084cd] Instance disappeared before build. 
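Annotation: the "Waiting for function ... _deallocate_network_with_retries to return" entry is oslo.service's looping-call machinery, which re-invokes a function on a fixed interval until it signals completion. A sketch of that general pattern under the assumption of an eventlet-based service like nova-compute; the retry body here is hypothetical, not Nova's actual deallocation logic:

    from oslo_service import loopingcall

    attempts = {'count': 0}

    def _deallocate_network_with_retries():
        attempts['count'] += 1
        ok = attempts['count'] >= 2      # pretend the first attempt fails
        if not ok:
            return                       # returning lets the loop call us again
        # Terminal success: stop the loop and hand a value back to wait().
        raise loopingcall.LoopingCallDone(True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
    result = timer.start(interval=1).wait()  # blocks, like "Waiting for function" above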
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 896.977502] env[68040]: DEBUG oslo_concurrency.lockutils [None req-9f583b64-9d44-4375-801a-6272c17e1723 tempest-TenantUsagesTestJSON-4403897 tempest-TenantUsagesTestJSON-4403897-project-member] Lock "4e0f3617-aef9-4d66-8243-ff530b4084cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 221.239s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 896.989582] env[68040]: DEBUG nova.compute.manager [None req-a224b37e-6527-43da-bde8-66de547e9a8b tempest-ImagesOneServerTestJSON-475436907 tempest-ImagesOneServerTestJSON-475436907-project-member] [instance: 28fd3e76-1a9b-4273-b951-e50a8506a9bc] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 897.018488] env[68040]: DEBUG nova.compute.manager [None req-a224b37e-6527-43da-bde8-66de547e9a8b tempest-ImagesOneServerTestJSON-475436907 tempest-ImagesOneServerTestJSON-475436907-project-member] [instance: 28fd3e76-1a9b-4273-b951-e50a8506a9bc] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 897.039853] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a224b37e-6527-43da-bde8-66de547e9a8b tempest-ImagesOneServerTestJSON-475436907 tempest-ImagesOneServerTestJSON-475436907-project-member] Lock "28fd3e76-1a9b-4273-b951-e50a8506a9bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.273s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.059561] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c6b25696-e488-4b01-a26c-da2b3bff00b2 tempest-InstanceActionsTestJSON-301596121 tempest-InstanceActionsTestJSON-301596121-project-member] Lock "f23e3529-19a6-4562-ae9b-591d1a452385" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.213s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.060905] env[68040]: DEBUG nova.compute.manager [None req-a4e9dc12-89b9-46c4-a6e5-bc2dfa6d256f tempest-VolumesAssistedSnapshotsTest-914443928 tempest-VolumesAssistedSnapshotsTest-914443928-project-member] [instance: 810b9f3c-7a92-40ab-8630-5c1ad6e4762c] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 897.109691] env[68040]: DEBUG nova.compute.manager [None req-a4e9dc12-89b9-46c4-a6e5-bc2dfa6d256f tempest-VolumesAssistedSnapshotsTest-914443928 tempest-VolumesAssistedSnapshotsTest-914443928-project-member] [instance: 810b9f3c-7a92-40ab-8630-5c1ad6e4762c] Instance disappeared before build. 
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 897.131767] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e9dc12-89b9-46c4-a6e5-bc2dfa6d256f tempest-VolumesAssistedSnapshotsTest-914443928 tempest-VolumesAssistedSnapshotsTest-914443928-project-member] Lock "810b9f3c-7a92-40ab-8630-5c1ad6e4762c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.281s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.144453] env[68040]: DEBUG nova.compute.manager [None req-f2820da5-a2ae-477a-a262-ab33174148d0 tempest-ServersWithSpecificFlavorTestJSON-83255541 tempest-ServersWithSpecificFlavorTestJSON-83255541-project-member] [instance: 0a9103d6-2461-4ed3-93fa-a0149ccc5267] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 897.172034] env[68040]: DEBUG nova.compute.manager [None req-f2820da5-a2ae-477a-a262-ab33174148d0 tempest-ServersWithSpecificFlavorTestJSON-83255541 tempest-ServersWithSpecificFlavorTestJSON-83255541-project-member] [instance: 0a9103d6-2461-4ed3-93fa-a0149ccc5267] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 897.196713] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f2820da5-a2ae-477a-a262-ab33174148d0 tempest-ServersWithSpecificFlavorTestJSON-83255541 tempest-ServersWithSpecificFlavorTestJSON-83255541-project-member] Lock "0a9103d6-2461-4ed3-93fa-a0149ccc5267" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.827s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.208034] env[68040]: DEBUG nova.compute.manager [None req-028640e1-c0bb-4655-9d8a-614aac499dac tempest-ServerActionsTestOtherA-519480154 tempest-ServerActionsTestOtherA-519480154-project-member] [instance: 0a0d385d-3255-4755-8987-a26cd28006cb] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 897.234176] env[68040]: DEBUG nova.compute.manager [None req-028640e1-c0bb-4655-9d8a-614aac499dac tempest-ServerActionsTestOtherA-519480154 tempest-ServerActionsTestOtherA-519480154-project-member] [instance: 0a0d385d-3255-4755-8987-a26cd28006cb] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 897.256266] env[68040]: DEBUG oslo_concurrency.lockutils [None req-028640e1-c0bb-4655-9d8a-614aac499dac tempest-ServerActionsTestOtherA-519480154 tempest-ServerActionsTestOtherA-519480154-project-member] Lock "0a0d385d-3255-4755-8987-a26cd28006cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.645s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.267166] env[68040]: DEBUG nova.compute.manager [None req-76302229-e0af-42eb-9319-827f9c2429cf tempest-ServerDiagnosticsNegativeTest-1386750677 tempest-ServerDiagnosticsNegativeTest-1386750677-project-member] [instance: 0cc0463e-1e55-4dd5-96b2-ee15025e689b] Starting instance... 
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 897.291957] env[68040]: DEBUG nova.compute.manager [None req-76302229-e0af-42eb-9319-827f9c2429cf tempest-ServerDiagnosticsNegativeTest-1386750677 tempest-ServerDiagnosticsNegativeTest-1386750677-project-member] [instance: 0cc0463e-1e55-4dd5-96b2-ee15025e689b] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 897.317044] env[68040]: DEBUG oslo_concurrency.lockutils [None req-76302229-e0af-42eb-9319-827f9c2429cf tempest-ServerDiagnosticsNegativeTest-1386750677 tempest-ServerDiagnosticsNegativeTest-1386750677-project-member] Lock "0cc0463e-1e55-4dd5-96b2-ee15025e689b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.380s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.327383] env[68040]: DEBUG nova.compute.manager [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 897.391806] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.392079] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.393529] env[68040]: INFO nova.compute.claims [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 897.829903] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ead8a7e-92cd-46a9-9966-bc1abd358cb0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.837889] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b3c9fd-b551-421b-823d-7921c839dcc9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.868782] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b3c2e0-425c-48b7-8baf-c3ac4732a0a0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.876144] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd4ec56-af5a-4ba7-9752-e9a7c7c49418 {{(pid=68040) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.889126] env[68040]: DEBUG nova.compute.provider_tree [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 897.899515] env[68040]: DEBUG nova.scheduler.client.report [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 897.916416] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.524s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.916925] env[68040]: DEBUG nova.compute.manager [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 897.969084] env[68040]: DEBUG nova.compute.utils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 897.970554] env[68040]: DEBUG nova.compute.manager [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Not allocating networking since 'none' was specified. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 897.981071] env[68040]: DEBUG nova.compute.manager [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 898.053850] env[68040]: DEBUG nova.compute.manager [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Start spawning the instance on the hypervisor. 
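Annotation: the inventory dict logged above is what Placement admits requests against: a request fits a resource class when used + requested <= (total - reserved) * allocation_ratio, and no single allocation may exceed max_unit. A worked check plugging in the logged values (illustrative arithmetic, not Placement's code):

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0, 'max_unit': 125},
    }

    def fits(rc, requested, used):
        inv = inventory[rc]
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        return requested <= inv['max_unit'] and used + requested <= capacity

    # 48 physical cores oversubscribed 4x -> 192 schedulable VCPUs, but no
    # single instance may ask for more than max_unit=16.
    print(fits('VCPU', 16, 0))           # True
    print(fits('VCPU', 17, 0))           # False (max_unit cap)
    print(fits('MEMORY_MB', 128, 1280))  # True: m1.nano vs 196078 MB capacity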
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 898.093223] env[68040]: DEBUG nova.virt.hardware [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 898.093473] env[68040]: DEBUG nova.virt.hardware [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 898.094270] env[68040]: DEBUG nova.virt.hardware [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 898.094270] env[68040]: DEBUG nova.virt.hardware [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 898.094270] env[68040]: DEBUG nova.virt.hardware [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 898.094418] env[68040]: DEBUG nova.virt.hardware [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 898.094703] env[68040]: DEBUG nova.virt.hardware [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 898.095683] env[68040]: DEBUG nova.virt.hardware [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
898.095683] env[68040]: DEBUG nova.virt.hardware [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 898.095683] env[68040]: DEBUG nova.virt.hardware [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 898.095683] env[68040]: DEBUG nova.virt.hardware [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 898.097167] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b09adf3-69a9-4c95-bd43-5c1a79ba9506 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.106370] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-befda9b0-ebad-4989-85a5-f9a68ac05a7a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.121157] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Instance VIF info [] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 898.129037] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Creating folder: Project (67d430ee46e449b6b6a1b248d9520bdc). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 898.130526] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-577c5a5b-6f9b-408d-bca2-daa017e503b6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.140548] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Created folder: Project (67d430ee46e449b6b6a1b248d9520bdc) in parent group-v639956. [ 898.140812] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Creating folder: Instances. Parent ref: group-v640001. 
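Annotation: the nova.virt.hardware entries above walk the flavor/image topology limits (65536 sockets/cores/threads when unset) and end with exactly one candidate for a single vCPU. An illustrative enumeration of valid topologies under those caps; this is a simplification of nova.virt.hardware, not its real preference/sort logic:

    import itertools

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # A (sockets, cores, threads) triple is valid when its product
        # equals the vCPU count and each factor respects its cap.
        for s, c, t in itertools.product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield (s, c, t)

    print(list(possible_topologies(1)))  # [(1, 1, 1)] -- "Got 1 possible topologies"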
{{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 898.141134] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-098ff53f-1fcc-434e-a748-7a9b62002c79 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.149329] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Created folder: Instances in parent group-v640001. [ 898.149574] env[68040]: DEBUG oslo.service.loopingcall [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 898.149760] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 898.149958] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d12b869-874c-4c89-92db-acb2facd3691 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.166474] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 898.166474] env[68040]: value = "task-3200209" [ 898.166474] env[68040]: _type = "Task" [ 898.166474] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.174042] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200209, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.677555] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200209, 'name': CreateVM_Task, 'duration_secs': 0.284847} completed successfully. 
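Annotation: the CreateVM_Task block above shows the asynchronous task pattern: vCenter returns a Task handle (task-3200209) immediately and the client polls it until a terminal state, here finishing in 0.284847s. A generic sketch of that polling loop; get_task_info and the _FakeInfo stand-in are hypothetical helpers, the real loop lives in oslo_vmware.api:

    import time

    def wait_for_task(get_task_info, task_id, interval=0.5, timeout=300):
        # get_task_info(task_id) is assumed to return an object with
        # .state ('running'|'success'|'error'), .progress and .error,
        # standing in for a TaskInfo property fetch.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_id)
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise RuntimeError(info.error)
            # e.g. "Task: {'id': task-3200209, ...} progress is 0%."
            print(f"Task {task_id} progress is {info.progress}%")
            time.sleep(interval)
        raise TimeoutError(task_id)

    class _FakeInfo:
        def __init__(self, state, progress=0, error=None):
            self.state, self.progress, self.error = state, progress, error

    _states = iter([_FakeInfo('running'), _FakeInfo('success')])
    print(wait_for_task(lambda tid: next(_states), 'task-3200209', interval=0).state)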
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.677852] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 898.678166] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.678336] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.678646] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 898.678889] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f325fc1-37a6-40ad-9239-a7c165a755a1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.683141] env[68040]: DEBUG oslo_vmware.api [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Waiting for the task: (returnval){ [ 898.683141] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52d6f615-2e76-6fab-d078-0a48ad348f18" [ 898.683141] env[68040]: _type = "Task" [ 898.683141] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.690629] env[68040]: DEBUG oslo_vmware.api [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52d6f615-2e76-6fab-d078-0a48ad348f18, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.984600] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 898.984772] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Cleaning up deleted instances {{(pid=68040) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 898.999587] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] There are 0 instances to clean {{(pid=68040) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 898.999869] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 899.000052] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Cleaning up deleted instances with incomplete migration {{(pid=68040) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 899.019258] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 899.195839] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.195839] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 899.195839] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.358558] env[68040]: DEBUG oslo_concurrency.lockutils [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Acquiring lock "de1b8ef9-0088-4d2a-985e-d04fcff55d31" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.029360] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] 
Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 902.029671] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 902.029786] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 902.029932] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 902.989016] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 902.989016] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 902.989016] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 903.014930] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 903.015539] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 903.015736] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 903.015855] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 903.016021] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Skipping network cache update for instance because it is Building. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 903.016156] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 903.016655] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 903.016825] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 903.016974] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 903.017121] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 903.017764] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. 
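Annotation: every "Running periodic task ComputeManager._..." line comes from oslo.service's periodic_task framework: methods decorated on a PeriodicTasks subclass are collected and driven on their spacing by the service loop. A minimal sketch with a made-up task that mimics the heal-cache skip above (the 'building' filter is illustrative, not Nova's exact condition):

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):

        @periodic_task.periodic_task(spacing=60)
        def _heal_instance_info_cache(self, context):
            for inst in context.get('instances', []):
                if inst.get('vm_state') == 'building':
                    # Mirrors "Skipping network cache update ... it is Building."
                    continue
                pass  # otherwise refresh this instance's network info cache

    mgr = Manager(cfg.CONF)
    mgr.run_periodic_tasks({'instances': []})  # the service loop calls this on a timer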
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 903.018878] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 903.033887] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.034864] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.035017] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.035174] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 903.036361] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d59dcf8-1203-4ab4-a567-8670045bb492 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.045236] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb1138d-d3a5-46d0-abad-9d02556aef84 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.062174] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5abdd024-9caa-491b-b502-f80178675af3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.067997] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-110287d8-a48f-494b-8191-9da2486bb485 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.100091] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180998MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 903.100091] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.100261] 
env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.254629] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 903.255141] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.290764] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 903.290933] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3411cba3-71c9-4334-bc79-4e322f4231f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 903.291206] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 97b050ff-2997-4504-8787-04f1221251b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 903.291206] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b81d413c-2449-471a-b3d9-693fc0ab2824 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 903.291474] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 39de4e78-44cd-4582-998e-88ce6de2d51c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 903.291474] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance a89ff564-ea35-4000-8efa-2c1ec2b61759 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 903.291670] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 42f39352-e703-4ebf-9559-4c8b5abca70e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 903.291670] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d1819f29-a891-47dd-a456-8f3b127daf6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 903.291792] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1812f13e-b03d-48d4-940a-43974784265b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 903.292108] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance de1b8ef9-0088-4d2a-985e-d04fcff55d31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 903.308569] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance ba480b6b-3d33-4f60-b045-21fe059fd0a6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.322694] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0b61102f-1b2e-4962-b94b-d27d394c5aef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.338351] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 856ec4dd-3a1d-4140-b3d5-52690cf87f92 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.352077] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance dae1bdde-e497-4ee6-9582-4988c5ae7a96 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.366178] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1b382d10-944a-4817-b959-c8ad2664309e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.385243] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 37f5c20e-dbc4-46a3-a83f-c7329f7a764c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.400659] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 25cb12ed-d0ed-402f-ba73-3c6c835adb17 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.419835] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d42d2ca9-ac93-4efb-92a0-de248221dd43 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.432733] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance db379674-cc77-430b-bd6d-2f674d57a7ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.446922] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f17efcc0-2a35-4360-abdf-1543a4cd0fcc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.465916] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance bce68a2b-260c-45cc-ac98-d4b01b4513a4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.477503] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1e43f6be-f6a3-4569-adea-c82a5d709247 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.494671] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3738de32-79cd-4b04-8081-cc1146730c75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.513349] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b9c0cbae-d76a-4ec9-9cc8-727d011dc5f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.530045] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance a51e5779-42bf-4281-8c46-1dcc771382ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.541019] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f740b2a2-a3f5-45d7-913f-5455236a2620 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.554608] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
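Annotation: each actively managed instance above holds a {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1} allocation (m1.nano), and ten such instances are listed. The totals in the final resource view that follows fall straight out of that arithmetic, assuming the tracker counts the 512 MB host reservation as used memory (a worked check, not the tracker's code):

    reserved_ram_mb = 512                      # host memory reservation from the inventory
    per_instance = {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}
    managed = 10                               # actively managed instances above

    used_ram = reserved_ram_mb + managed * per_instance['MEMORY_MB']
    used_disk = managed * per_instance['DISK_GB']
    used_vcpus = managed * per_instance['VCPU']

    print(used_ram, used_disk, used_vcpus)
    # 1792 10 10 -> matches used_ram=1792MB, used_disk=10GB, used_vcpus=10 below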
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.554608] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 903.554773] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 903.956022] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f75be38-e27c-4375-831e-eb55f50a746c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.964768] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7706a872-e3b5-4f76-bf56-c0475a0da2e4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.995404] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf38a25d-2266-437f-a96c-9f608a31c2ab {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.003171] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-952a6394-aef1-43ad-ba99-1c654cc0fd0d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.018260] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.026941] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 904.049822] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 904.050075] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.950s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 905.015791] env[68040]: DEBUG oslo_service.periodic_task [None 
[ 905.015791] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 905.016221] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 905.016444] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 905.016601] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 908.824396] env[68040]: DEBUG oslo_concurrency.lockutils [None req-40811468-782c-4f4a-bd9d-fd0224b67b79 tempest-MultipleCreateTestJSON-225064095 tempest-MultipleCreateTestJSON-225064095-project-member] Acquiring lock "3f9438b9-bfe3-4a7f-bfec-e140fed5f66c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.824636] env[68040]: DEBUG oslo_concurrency.lockutils [None req-40811468-782c-4f4a-bd9d-fd0224b67b79 tempest-MultipleCreateTestJSON-225064095 tempest-MultipleCreateTestJSON-225064095-project-member] Lock "3f9438b9-bfe3-4a7f-bfec-e140fed5f66c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.865644] env[68040]: DEBUG oslo_concurrency.lockutils [None req-40811468-782c-4f4a-bd9d-fd0224b67b79 tempest-MultipleCreateTestJSON-225064095 tempest-MultipleCreateTestJSON-225064095-project-member] Acquiring lock "e2cd4cbd-279b-4852-85b0-f78af45bf7e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.865644] env[68040]: DEBUG oslo_concurrency.lockutils [None req-40811468-782c-4f4a-bd9d-fd0224b67b79 tempest-MultipleCreateTestJSON-225064095 tempest-MultipleCreateTestJSON-225064095-project-member] Lock "e2cd4cbd-279b-4852-85b0-f78af45bf7e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.547254] env[68040]: DEBUG oslo_concurrency.lockutils [None req-db81460a-d299-4a77-a426-bf8dc826c72c tempest-ServerActionsTestJSON-706247280 tempest-ServerActionsTestJSON-706247280-project-member] Acquiring lock "8ec0f6bd-4a3e-4e70-b310-714676607b9c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
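The Acquiring/acquired pairs above (and the matching "released ... held Ns" records later) come from oslo.concurrency's lock wrapper around the per-instance build section; the quoted qualname is the decorated inner function. A minimal sketch of the pattern, with a hypothetical critical section standing in for the real build steps:

    from oslo_concurrency import lockutils

    # lockutils.synchronized serializes callers on the named lock and emits
    # the same "Acquiring lock ..." / "acquired ... :: waited Ns" /
    # "released ... :: held Ns" bookkeeping seen in these records.
    @lockutils.synchronized('3f9438b9-bfe3-4a7f-bfec-e140fed5f66c')
    def _locked_do_build_and_run_instance():
        pass  # build steps would run here, one builder per instance UUID

    _locked_do_build_and_run_instance()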
[ 909.547488] env[68040]: DEBUG oslo_concurrency.lockutils [None req-db81460a-d299-4a77-a426-bf8dc826c72c tempest-ServerActionsTestJSON-706247280 tempest-ServerActionsTestJSON-706247280-project-member] Lock "8ec0f6bd-4a3e-4e70-b310-714676607b9c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.124260] env[68040]: DEBUG oslo_concurrency.lockutils [None req-010535a2-5c2c-4124-bd7d-0ad1b27a34e1 tempest-ServerMetadataTestJSON-1008625601 tempest-ServerMetadataTestJSON-1008625601-project-member] Acquiring lock "25298be1-8cc9-46fa-9b33-62425bcb91dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.124511] env[68040]: DEBUG oslo_concurrency.lockutils [None req-010535a2-5c2c-4124-bd7d-0ad1b27a34e1 tempest-ServerMetadataTestJSON-1008625601 tempest-ServerMetadataTestJSON-1008625601-project-member] Lock "25298be1-8cc9-46fa-9b33-62425bcb91dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.758631] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1c2fbfe0-bfd1-43fe-b06b-7f3fa185a788 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquiring lock "67756ba9-5738-4669-ace9-a3d2f1952dfa" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 913.759345] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1c2fbfe0-bfd1-43fe-b06b-7f3fa185a788 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "67756ba9-5738-4669-ace9-a3d2f1952dfa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.259663] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f759feb5-2012-4bf9-8b20-3541746c3a9a tempest-SecurityGroupsTestJSON-810175009 tempest-SecurityGroupsTestJSON-810175009-project-member] Acquiring lock "940a6a43-d74e-419c-af5b-92c991e3649d" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.259663] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f759feb5-2012-4bf9-8b20-3541746c3a9a tempest-SecurityGroupsTestJSON-810175009 tempest-SecurityGroupsTestJSON-810175009-project-member] Lock "940a6a43-d74e-419c-af5b-92c991e3649d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.275546] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4857b5a4-1338-4d93-af7c-84ff79ed1756 tempest-ServersTestMultiNic-1524601141 tempest-ServersTestMultiNic-1524601141-project-member] Acquiring lock "1b4b422a-f096-4ed1-9d47-f150e7a3434f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.275868] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4857b5a4-1338-4d93-af7c-84ff79ed1756 tempest-ServersTestMultiNic-1524601141 tempest-ServersTestMultiNic-1524601141-project-member] Lock "1b4b422a-f096-4ed1-9d47-f150e7a3434f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.815953] env[68040]: WARNING oslo_vmware.rw_handles [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 941.815953] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 941.815953] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 941.815953] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 941.815953] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 941.815953] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 941.815953] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 941.815953] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 941.815953] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 941.815953] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 941.815953] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 941.815953] env[68040]: ERROR oslo_vmware.rw_handles [ 941.816619] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/bc9294db-a802-4464-90dc-1828d31ef73e/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 941.818312] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 941.818594] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Copying Virtual Disk [datastore2] vmware_temp/bc9294db-a802-4464-90dc-1828d31ef73e/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/bc9294db-a802-4464-90dc-1828d31ef73e/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 941.818886] env[68040]: DEBUG oslo_vmware.service
[-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c9615852-a2cc-4c35-821f-b1c812966d4c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.827823] env[68040]: DEBUG oslo_vmware.api [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Waiting for the task: (returnval){ [ 941.827823] env[68040]: value = "task-3200210" [ 941.827823] env[68040]: _type = "Task" [ 941.827823] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.834988] env[68040]: DEBUG oslo_vmware.api [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Task: {'id': task-3200210, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.338027] env[68040]: DEBUG oslo_vmware.exceptions [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 942.338313] env[68040]: DEBUG oslo_concurrency.lockutils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.339054] env[68040]: ERROR nova.compute.manager [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 942.339054] env[68040]: Faults: ['InvalidArgument'] [ 942.339054] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Traceback (most recent call last): [ 942.339054] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 942.339054] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] yield resources [ 942.339054] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 942.339054] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] self.driver.spawn(context, instance, image_meta, [ 942.339054] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 942.339054] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] self._vmops.spawn(context, instance, image_meta, injected_files, [ 942.339054] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 942.339054] env[68040]: 
ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] self._fetch_image_if_missing(context, vi) [ 942.339054] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 942.339549] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] image_cache(vi, tmp_image_ds_loc) [ 942.339549] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 942.339549] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] vm_util.copy_virtual_disk( [ 942.339549] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 942.339549] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] session._wait_for_task(vmdk_copy_task) [ 942.339549] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 942.339549] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] return self.wait_for_task(task_ref) [ 942.339549] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 942.339549] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] return evt.wait() [ 942.339549] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 942.339549] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] result = hub.switch() [ 942.339549] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 942.339549] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] return self.greenlet.switch() [ 942.339954] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 942.339954] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] self.f(*self.args, **self.kw) [ 942.339954] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 942.339954] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] raise exceptions.translate_fault(task_info.error) [ 942.339954] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 942.339954] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Faults: ['InvalidArgument'] [ 942.339954] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] [ 942.339954] env[68040]: INFO 
nova.compute.manager [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Terminating instance [ 942.340899] env[68040]: DEBUG oslo_concurrency.lockutils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.341118] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.341352] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00dc5ebf-2f18-49d1-9730-17d49a2d5047 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.343517] env[68040]: DEBUG nova.compute.manager [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 942.343732] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 942.344455] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a4fd0c-2812-4fee-9921-94307f4eef88 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.350969] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 942.351198] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-72bf29f0-efb3-4a60-8153-a36e9ef22ef2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.353321] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.353501] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 942.354469] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87a09a14-cac1-4208-809a-d3a089b57a5e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.359066] env[68040]: DEBUG oslo_vmware.api [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Waiting for the task: (returnval){ [ 942.359066] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52faafbc-ae3e-6fb0-fac5-b17d80105848" [ 942.359066] env[68040]: _type = "Task" [ 942.359066] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.367878] env[68040]: DEBUG oslo_vmware.api [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52faafbc-ae3e-6fb0-fac5-b17d80105848, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.415234] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 942.415489] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 942.415716] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Deleting the datastore file [datastore2] 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 942.415987] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ca59275-8690-4604-b3b7-4dc11a6c5cb8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.422732] env[68040]: DEBUG oslo_vmware.api [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Waiting for the task: (returnval){ [ 942.422732] env[68040]: value = "task-3200212" [ 942.422732] env[68040]: _type = "Task" [ 942.422732] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.869161] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 942.869431] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Creating directory with path [datastore2] vmware_temp/2612fcc1-6828-4f3d-be83-ae1f64706579/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.869678] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bbbe71d5-1a55-4019-99fd-34e964e4b1fa {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.880882] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Created directory with path [datastore2] vmware_temp/2612fcc1-6828-4f3d-be83-ae1f64706579/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.881098] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Fetch image to [datastore2] vmware_temp/2612fcc1-6828-4f3d-be83-ae1f64706579/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 942.881278] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/2612fcc1-6828-4f3d-be83-ae1f64706579/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 942.882014] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-118cf4ae-9186-449b-8b87-32f851ef23a4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.888473] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6162cc-ab06-4bc5-87d3-7af8feda0a05 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.897323] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b8cb0b-addb-4620-8113-582234997887 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.930724] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a1f14ff-347a-4201-a29b-949219fb4e47 {{(pid=68040) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.937374] env[68040]: DEBUG oslo_vmware.api [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Task: {'id': task-3200212, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078802} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.938768] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 942.938956] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 942.939144] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 942.939323] env[68040]: INFO nova.compute.manager [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Took 0.60 seconds to destroy the instance on the hypervisor. 
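The "Waiting for the task ... to complete", "progress is 0%." and "completed successfully" records are oslo.vmware's task-polling loop: a *_Task method is invoked over SOAP, and wait_for_task() then polls the returned task at task_poll_interval until it reaches a terminal state, translating error faults into exceptions such as the VimFaultException(['InvalidArgument']) above. A minimal sketch, assuming a hypothetical vCenter endpoint and credentials:

    from oslo_vmware import api
    from oslo_vmware import exceptions as vexc

    session = api.VMwareAPISession(
        'vc1.example.test', 'user', 'secret',    # hypothetical endpoint
        api_retry_count=10, task_poll_interval=0.5)

    # Mirror task-3200212: submit a datastore file deletion, then poll it.
    content = session.vim.service_content
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', content.fileManager,
        name='[datastore2] 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45',
        datacenter=None)  # real callers pass a datacenter moref here
    try:
        task_info = session.wait_for_task(task)  # blocks while polling
        print(task_info.state)                   # 'success'
    except vexc.VimFaultException as e:
        print('fault:', e.fault_list)            # e.g. ['InvalidArgument']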
[ 942.941093] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-515d7049-ba8c-4198-8a47-62361feb0d67 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.942911] env[68040]: DEBUG nova.compute.claims [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 942.943097] env[68040]: DEBUG oslo_concurrency.lockutils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.943313] env[68040]: DEBUG oslo_concurrency.lockutils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.965770] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 943.018532] env[68040]: DEBUG oslo_vmware.rw_handles [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2612fcc1-6828-4f3d-be83-ae1f64706579/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 943.081715] env[68040]: DEBUG oslo_vmware.rw_handles [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 943.081959] env[68040]: DEBUG oslo_vmware.rw_handles [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2612fcc1-6828-4f3d-be83-ae1f64706579/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 943.298616] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096711de-45cf-4cf8-b7f0-40e17224827c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.306869] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b0f757-0b94-4cac-8fb0-c43f5ddae020 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.336160] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52edc903-69f6-4539-8636-3a8a967d363a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.343253] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d854971d-53ce-4019-8efe-7f30009d6ef1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.357036] env[68040]: DEBUG nova.compute.provider_tree [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.366112] env[68040]: DEBUG nova.scheduler.client.report [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 943.381800] env[68040]: DEBUG oslo_concurrency.lockutils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.438s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.382342] env[68040]: ERROR nova.compute.manager [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 943.382342] env[68040]: Faults: ['InvalidArgument'] [ 943.382342] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Traceback (most recent call last): [ 943.382342] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 943.382342] env[68040]: ERROR nova.compute.manager [instance: 
17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] self.driver.spawn(context, instance, image_meta, [ 943.382342] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 943.382342] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] self._vmops.spawn(context, instance, image_meta, injected_files, [ 943.382342] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 943.382342] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] self._fetch_image_if_missing(context, vi) [ 943.382342] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 943.382342] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] image_cache(vi, tmp_image_ds_loc) [ 943.382342] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 943.382742] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] vm_util.copy_virtual_disk( [ 943.382742] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 943.382742] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] session._wait_for_task(vmdk_copy_task) [ 943.382742] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 943.382742] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] return self.wait_for_task(task_ref) [ 943.382742] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 943.382742] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] return evt.wait() [ 943.382742] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 943.382742] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] result = hub.switch() [ 943.382742] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 943.382742] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] return self.greenlet.switch() [ 943.382742] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 943.382742] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] self.f(*self.args, **self.kw) [ 943.383160] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
448, in _poll_task [ 943.383160] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] raise exceptions.translate_fault(task_info.error) [ 943.383160] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 943.383160] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Faults: ['InvalidArgument'] [ 943.383160] env[68040]: ERROR nova.compute.manager [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] [ 943.383160] env[68040]: DEBUG nova.compute.utils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 943.384397] env[68040]: DEBUG nova.compute.manager [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Build of instance 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45 was re-scheduled: A specified parameter was not correct: fileType [ 943.384397] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 943.384767] env[68040]: DEBUG nova.compute.manager [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 943.384949] env[68040]: DEBUG nova.compute.manager [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
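The records that follow are the standard build-failure unwind: the resource claim was already aborted (returning the Placement allocation), networking is cleaned up best-effort, and the request is handed back to the scheduler. A rough sketch of that control flow, with hypothetical helper names rather than Nova's actual code:

    # All helpers below are illustrative stand-ins, not Nova APIs.
    class VimFaultException(Exception):
        pass

    def driver_spawn(instance):
        raise VimFaultException('A specified parameter was not correct: fileType')

    def abort_instance_claim(instance):
        print('claim aborted, usage returned to Placement')

    def cleanup_allocated_networks(instance):
        print('network deallocated')

    def reschedule(instance, reason):
        print('re-scheduled:', reason)

    def _do_build_and_run_instance(instance):
        try:
            driver_spawn(instance)                 # fails like the spawn above
        except Exception as exc:
            abort_instance_claim(instance)
            cleanup_allocated_networks(instance)   # "Deallocating network ..."
            reschedule(instance, reason=str(exc))  # "Build ... was re-scheduled"

    _do_build_and_run_instance('17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45')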
[ 943.385116] env[68040]: DEBUG nova.compute.manager [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 943.385277] env[68040]: DEBUG nova.network.neutron [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 943.896480] env[68040]: DEBUG nova.network.neutron [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.906364] env[68040]: INFO nova.compute.manager [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Took 0.52 seconds to deallocate network for instance. [ 943.994041] env[68040]: INFO nova.scheduler.client.report [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Deleted allocations for instance 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45 [ 944.016977] env[68040]: DEBUG oslo_concurrency.lockutils [None req-128adcf1-9176-4741-b8e5-f987e25b5a20 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Lock "17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 291.624s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.018155] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43459e79-3a4f-4b5f-828c-06661848124b tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Lock "17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 92.820s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.018383] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43459e79-3a4f-4b5f-828c-06661848124b tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Acquiring lock "17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.018596] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43459e79-3a4f-4b5f-828c-06661848124b tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Lock "17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68040) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.018762] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43459e79-3a4f-4b5f-828c-06661848124b tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Lock "17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.020709] env[68040]: INFO nova.compute.manager [None req-43459e79-3a4f-4b5f-828c-06661848124b tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Terminating instance [ 944.022323] env[68040]: DEBUG nova.compute.manager [None req-43459e79-3a4f-4b5f-828c-06661848124b tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 944.022521] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-43459e79-3a4f-4b5f-828c-06661848124b tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 944.022982] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9fcbaa76-794b-421b-b3de-1b5fff6e0174 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.034039] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53d7d41-be73-4ab8-b3f3-51279f1902ad {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.045135] env[68040]: DEBUG nova.compute.manager [None req-b2463724-8a2d-4877-a8db-40b9436ca215 tempest-ServerAddressesNegativeTestJSON-701301007 tempest-ServerAddressesNegativeTestJSON-701301007-project-member] [instance: e2a9808e-6da7-4e53-a6d3-d3144ecf158a] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 944.065298] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-43459e79-3a4f-4b5f-828c-06661848124b tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45 could not be found. [ 944.065679] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-43459e79-3a4f-4b5f-828c-06661848124b tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 944.065912] env[68040]: INFO nova.compute.manager [None req-43459e79-3a4f-4b5f-828c-06661848124b tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Took 0.04 seconds to destroy the instance on the hypervisor.
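Note that this second destroy is idempotent: the failed build's cleanup already unregistered the VM, so the driver catches InstanceNotFound, logs the warning, and still reports the instance destroyed 0.04 seconds later. A minimal sketch of that ignore-missing pattern, again with hypothetical helpers:

    class InstanceNotFound(Exception):
        pass

    def unregister_vm(uuid):
        # Simulate the backend VM already being gone, as in the log above.
        raise InstanceNotFound(uuid)

    def destroy(uuid):
        try:
            unregister_vm(uuid)
        except InstanceNotFound:
            print('Instance does not exist on backend:', uuid)
        # Either way, terminate proceeds as if the destroy succeeded.

    destroy('17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45')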
[ 944.066239] env[68040]: DEBUG oslo.service.loopingcall [None req-43459e79-3a4f-4b5f-828c-06661848124b tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 944.066455] env[68040]: DEBUG nova.compute.manager [-] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 944.066554] env[68040]: DEBUG nova.network.neutron [-] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 944.075944] env[68040]: DEBUG nova.compute.manager [None req-b2463724-8a2d-4877-a8db-40b9436ca215 tempest-ServerAddressesNegativeTestJSON-701301007 tempest-ServerAddressesNegativeTestJSON-701301007-project-member] [instance: e2a9808e-6da7-4e53-a6d3-d3144ecf158a] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 944.088898] env[68040]: DEBUG nova.network.neutron [-] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.096462] env[68040]: INFO nova.compute.manager [-] [instance: 17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45] Took 0.03 seconds to deallocate network for instance. [ 944.101425] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b2463724-8a2d-4877-a8db-40b9436ca215 tempest-ServerAddressesNegativeTestJSON-701301007 tempest-ServerAddressesNegativeTestJSON-701301007-project-member] Lock "e2a9808e-6da7-4e53-a6d3-d3144ecf158a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 239.612s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.109894] env[68040]: DEBUG nova.compute.manager [None req-87c93415-2d61-408b-8737-7a532e9b629a tempest-MultipleCreateTestJSON-225064095 tempest-MultipleCreateTestJSON-225064095-project-member] [instance: ba480b6b-3d33-4f60-b045-21fe059fd0a6] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 944.140780] env[68040]: DEBUG nova.compute.manager [None req-87c93415-2d61-408b-8737-7a532e9b629a tempest-MultipleCreateTestJSON-225064095 tempest-MultipleCreateTestJSON-225064095-project-member] [instance: ba480b6b-3d33-4f60-b045-21fe059fd0a6] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 944.161583] env[68040]: DEBUG oslo_concurrency.lockutils [None req-87c93415-2d61-408b-8737-7a532e9b629a tempest-MultipleCreateTestJSON-225064095 tempest-MultipleCreateTestJSON-225064095-project-member] Lock "ba480b6b-3d33-4f60-b045-21fe059fd0a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 238.447s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.170897] env[68040]: DEBUG nova.compute.manager [None req-87c93415-2d61-408b-8737-7a532e9b629a tempest-MultipleCreateTestJSON-225064095 tempest-MultipleCreateTestJSON-225064095-project-member] [instance: 0b61102f-1b2e-4962-b94b-d27d394c5aef] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 944.196596] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43459e79-3a4f-4b5f-828c-06661848124b tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Lock "17e9e12a-ec93-4ac5-bb6d-d6d0adb11e45" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.178s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.197975] env[68040]: DEBUG nova.compute.manager [None req-87c93415-2d61-408b-8737-7a532e9b629a tempest-MultipleCreateTestJSON-225064095 tempest-MultipleCreateTestJSON-225064095-project-member] [instance: 0b61102f-1b2e-4962-b94b-d27d394c5aef] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 944.216269] env[68040]: DEBUG oslo_concurrency.lockutils [None req-87c93415-2d61-408b-8737-7a532e9b629a tempest-MultipleCreateTestJSON-225064095 tempest-MultipleCreateTestJSON-225064095-project-member] Lock "0b61102f-1b2e-4962-b94b-d27d394c5aef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 238.456s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.225175] env[68040]: DEBUG nova.compute.manager [None req-b30a638a-87d2-412a-b0ca-4e522e603b4c tempest-ServersNegativeTestJSON-912418572 tempest-ServersNegativeTestJSON-912418572-project-member] [instance: 856ec4dd-3a1d-4140-b3d5-52690cf87f92] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 944.247237] env[68040]: DEBUG nova.compute.manager [None req-b30a638a-87d2-412a-b0ca-4e522e603b4c tempest-ServersNegativeTestJSON-912418572 tempest-ServersNegativeTestJSON-912418572-project-member] [instance: 856ec4dd-3a1d-4140-b3d5-52690cf87f92] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 944.267715] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b30a638a-87d2-412a-b0ca-4e522e603b4c tempest-ServersNegativeTestJSON-912418572 tempest-ServersNegativeTestJSON-912418572-project-member] Lock "856ec4dd-3a1d-4140-b3d5-52690cf87f92" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 235.294s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.275956] env[68040]: DEBUG nova.compute.manager [None req-65a5caf1-356b-40c2-a6be-90ff3fd78b20 tempest-SecurityGroupsTestJSON-810175009 tempest-SecurityGroupsTestJSON-810175009-project-member] [instance: dae1bdde-e497-4ee6-9582-4988c5ae7a96] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 944.299169] env[68040]: DEBUG nova.compute.manager [None req-65a5caf1-356b-40c2-a6be-90ff3fd78b20 tempest-SecurityGroupsTestJSON-810175009 tempest-SecurityGroupsTestJSON-810175009-project-member] [instance: dae1bdde-e497-4ee6-9582-4988c5ae7a96] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 944.321181] env[68040]: DEBUG oslo_concurrency.lockutils [None req-65a5caf1-356b-40c2-a6be-90ff3fd78b20 tempest-SecurityGroupsTestJSON-810175009 tempest-SecurityGroupsTestJSON-810175009-project-member] Lock "dae1bdde-e497-4ee6-9582-4988c5ae7a96" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 233.495s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.331734] env[68040]: DEBUG nova.compute.manager [None req-6a70d2e5-22e2-4c93-a852-ae7cba49af6f tempest-ServerPasswordTestJSON-1477827249 tempest-ServerPasswordTestJSON-1477827249-project-member] [instance: 1b382d10-944a-4817-b959-c8ad2664309e] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 944.355848] env[68040]: DEBUG nova.compute.manager [None req-6a70d2e5-22e2-4c93-a852-ae7cba49af6f tempest-ServerPasswordTestJSON-1477827249 tempest-ServerPasswordTestJSON-1477827249-project-member] [instance: 1b382d10-944a-4817-b959-c8ad2664309e] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 944.379173] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6a70d2e5-22e2-4c93-a852-ae7cba49af6f tempest-ServerPasswordTestJSON-1477827249 tempest-ServerPasswordTestJSON-1477827249-project-member] Lock "1b382d10-944a-4817-b959-c8ad2664309e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 232.284s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.388986] env[68040]: DEBUG nova.compute.manager [None req-ce3d29b4-4250-4781-8c88-e2937e3fe239 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 37f5c20e-dbc4-46a3-a83f-c7329f7a764c] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 944.412256] env[68040]: DEBUG nova.compute.manager [None req-ce3d29b4-4250-4781-8c88-e2937e3fe239 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] [instance: 37f5c20e-dbc4-46a3-a83f-c7329f7a764c] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 944.432765] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ce3d29b4-4250-4781-8c88-e2937e3fe239 tempest-DeleteServersAdminTestJSON-145190633 tempest-DeleteServersAdminTestJSON-145190633-project-member] Lock "37f5c20e-dbc4-46a3-a83f-c7329f7a764c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 232.241s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.441680] env[68040]: DEBUG nova.compute.manager [None req-f69692d9-d7cf-4204-b329-dad0300d9d74 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 25cb12ed-d0ed-402f-ba73-3c6c835adb17] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 944.464239] env[68040]: DEBUG nova.compute.manager [None req-f69692d9-d7cf-4204-b329-dad0300d9d74 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 25cb12ed-d0ed-402f-ba73-3c6c835adb17] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 944.484762] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f69692d9-d7cf-4204-b329-dad0300d9d74 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "25cb12ed-d0ed-402f-ba73-3c6c835adb17" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 231.305s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.493410] env[68040]: DEBUG nova.compute.manager [None req-bb8a9998-523f-47d2-bf66-5a22cf5a5cd5 tempest-FloatingIPsAssociationNegativeTestJSON-1605724567 tempest-FloatingIPsAssociationNegativeTestJSON-1605724567-project-member] [instance: d42d2ca9-ac93-4efb-92a0-de248221dd43] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 944.516015] env[68040]: DEBUG nova.compute.manager [None req-bb8a9998-523f-47d2-bf66-5a22cf5a5cd5 tempest-FloatingIPsAssociationNegativeTestJSON-1605724567 tempest-FloatingIPsAssociationNegativeTestJSON-1605724567-project-member] [instance: d42d2ca9-ac93-4efb-92a0-de248221dd43] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 944.537025] env[68040]: DEBUG oslo_concurrency.lockutils [None req-bb8a9998-523f-47d2-bf66-5a22cf5a5cd5 tempest-FloatingIPsAssociationNegativeTestJSON-1605724567 tempest-FloatingIPsAssociationNegativeTestJSON-1605724567-project-member] Lock "d42d2ca9-ac93-4efb-92a0-de248221dd43" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 231.345s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.545955] env[68040]: DEBUG nova.compute.manager [None req-ddc4c3b4-4a83-4085-90b6-f0bdea2e69a9 tempest-ServerActionsV293TestJSON-1697370989 tempest-ServerActionsV293TestJSON-1697370989-project-member] [instance: db379674-cc77-430b-bd6d-2f674d57a7ee] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 944.573046] env[68040]: DEBUG nova.compute.manager [None req-ddc4c3b4-4a83-4085-90b6-f0bdea2e69a9 tempest-ServerActionsV293TestJSON-1697370989 tempest-ServerActionsV293TestJSON-1697370989-project-member] [instance: db379674-cc77-430b-bd6d-2f674d57a7ee] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 944.594919] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ddc4c3b4-4a83-4085-90b6-f0bdea2e69a9 tempest-ServerActionsV293TestJSON-1697370989 tempest-ServerActionsV293TestJSON-1697370989-project-member] Lock "db379674-cc77-430b-bd6d-2f674d57a7ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 227.503s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.620181] env[68040]: DEBUG nova.compute.manager [None req-4b75365a-fb09-4842-8956-e8a040cbcabf tempest-ServerRescueTestJSON-756786842 tempest-ServerRescueTestJSON-756786842-project-member] [instance: f17efcc0-2a35-4360-abdf-1543a4cd0fcc] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 944.643113] env[68040]: DEBUG nova.compute.manager [None req-4b75365a-fb09-4842-8956-e8a040cbcabf tempest-ServerRescueTestJSON-756786842 tempest-ServerRescueTestJSON-756786842-project-member] [instance: f17efcc0-2a35-4360-abdf-1543a4cd0fcc] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 944.667125] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4b75365a-fb09-4842-8956-e8a040cbcabf tempest-ServerRescueTestJSON-756786842 tempest-ServerRescueTestJSON-756786842-project-member] Lock "f17efcc0-2a35-4360-abdf-1543a4cd0fcc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 217.794s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.680890] env[68040]: DEBUG nova.compute.manager [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Starting instance...
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 944.738593] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 944.738875] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 944.740434] env[68040]: INFO nova.compute.claims [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 945.080974] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ff87cbb-cd34-4c85-ac25-c96633f6175e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 945.088324] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc82f130-feca-4dcf-96ae-b913743099b8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 945.120309] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac38d74-12d8-45cf-9135-c7cef994a27f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 945.129171] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c150185-cf6d-4b76-b2b9-6c2f133794c2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 945.143092] env[68040]: DEBUG nova.compute.provider_tree [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 945.153754] env[68040]: DEBUG nova.scheduler.client.report [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
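The claim sequence above is the standard resource-tracker pattern: the claim is serialized under the "compute_resources" lock, and the inventory reported to Placement is what lets the scheduler pack work onto this node. As a rough illustration (a minimal sketch, not Nova's code; the capacity rule is how Placement documents it), usable capacity per resource class is (total - reserved) * allocation_ratio, while max_unit caps any single allocation:

# Sketch only: how Placement-style capacity falls out of the inventory
# record logged above. Values copied from the log entry at 945.153754.
INVENTORY = {
    'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 125,   'allocation_ratio': 1.0},
}

def capacity(inv: dict) -> float:
    # Placement sizes a resource class as (total - reserved) * allocation_ratio;
    # max_unit additionally caps what one allocation may request.
    return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

for rc, inv in INVENTORY.items():
    print(f"{rc}: capacity={capacity(inv):.0f}, max single allocation={inv['max_unit']}")

With the inventory shown, that works out to 192 schedulable VCPUs (48 * 4.0 overcommit) but at most 16 per instance, which is why the ten concurrent tempest instances later in this log leave the node far from saturated.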
[ 945.171449] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.432s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 945.173010] env[68040]: DEBUG nova.compute.manager [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 945.208418] env[68040]: DEBUG nova.compute.utils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 945.209888] env[68040]: DEBUG nova.compute.manager [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 945.210080] env[68040]: DEBUG nova.network.neutron [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 945.220017] env[68040]: DEBUG nova.compute.manager [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 945.289309] env[68040]: DEBUG nova.compute.manager [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Start spawning the instance on the hypervisor.
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 945.301492] env[68040]: DEBUG nova.policy [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '329c5e765af14548ad59c86860a5cf47', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0276ed66cde4a96bb03a3005a30325d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 945.312888] env[68040]: DEBUG nova.virt.hardware [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 945.313177] env[68040]: DEBUG nova.virt.hardware [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 945.313347] env[68040]: DEBUG nova.virt.hardware [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 945.313603] env[68040]: DEBUG nova.virt.hardware [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 945.313764] env[68040]: DEBUG nova.virt.hardware [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 945.313915] env[68040]: DEBUG nova.virt.hardware [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 945.314950] env[68040]: DEBUG nova.virt.hardware [None 
req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 945.315157] env[68040]: DEBUG nova.virt.hardware [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 945.315341] env[68040]: DEBUG nova.virt.hardware [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 945.315512] env[68040]: DEBUG nova.virt.hardware [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 945.315776] env[68040]: DEBUG nova.virt.hardware [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 945.317447] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0029f472-6825-4e62-bec3-f449f5780801 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.325443] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a94db865-abdc-4596-9b46-c4052abe3176 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.478748] env[68040]: DEBUG oslo_concurrency.lockutils [None req-675ea968-f2bd-4ccc-9ae9-19b4a4037098 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Acquiring lock "3e7cb203-0bad-49d0-83d4-b5a086c31ad6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.478986] env[68040]: DEBUG oslo_concurrency.lockutils [None req-675ea968-f2bd-4ccc-9ae9-19b4a4037098 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Lock "3e7cb203-0bad-49d0-83d4-b5a086c31ad6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 945.642599] env[68040]: DEBUG nova.network.neutron [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Successfully created port: eb72d5d3-6845-4a36-af2a-18556f4f14a1 {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 
946.207578] env[68040]: DEBUG nova.compute.manager [req-dee22fd7-64ba-4ff6-811c-a52d178ce4a5 req-3a723238-7065-41ea-b348-fac91c239d41 service nova] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Received event network-vif-plugged-eb72d5d3-6845-4a36-af2a-18556f4f14a1 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 946.207846] env[68040]: DEBUG oslo_concurrency.lockutils [req-dee22fd7-64ba-4ff6-811c-a52d178ce4a5 req-3a723238-7065-41ea-b348-fac91c239d41 service nova] Acquiring lock "bce68a2b-260c-45cc-ac98-d4b01b4513a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.208012] env[68040]: DEBUG oslo_concurrency.lockutils [req-dee22fd7-64ba-4ff6-811c-a52d178ce4a5 req-3a723238-7065-41ea-b348-fac91c239d41 service nova] Lock "bce68a2b-260c-45cc-ac98-d4b01b4513a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 946.208242] env[68040]: DEBUG oslo_concurrency.lockutils [req-dee22fd7-64ba-4ff6-811c-a52d178ce4a5 req-3a723238-7065-41ea-b348-fac91c239d41 service nova] Lock "bce68a2b-260c-45cc-ac98-d4b01b4513a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.208469] env[68040]: DEBUG nova.compute.manager [req-dee22fd7-64ba-4ff6-811c-a52d178ce4a5 req-3a723238-7065-41ea-b348-fac91c239d41 service nova] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] No waiting events found dispatching network-vif-plugged-eb72d5d3-6845-4a36-af2a-18556f4f14a1 {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 946.209055] env[68040]: WARNING nova.compute.manager [req-dee22fd7-64ba-4ff6-811c-a52d178ce4a5 req-3a723238-7065-41ea-b348-fac91c239d41 service nova] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Received unexpected event network-vif-plugged-eb72d5d3-6845-4a36-af2a-18556f4f14a1 for instance with vm_state building and task_state spawning. 
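The warning above is exactly the race it describes: Neutron's network-vif-plugged notification reached the compute manager before the driver had registered a waiter for it, so pop_instance_event found nothing and the event was flagged as unexpected (harmless while the instance is still in vm_state building). A minimal sketch of the prepare/pop pattern behind these log lines follows; it is simplified and hypothetical (plain threading, not Nova's actual event plumbing):

# Hypothetical sketch of the external-event handshake seen above.
import threading

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        # Registered *before* the operation that triggers the callback,
        # e.g. before plugging the VIF.
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop(self, instance_uuid, event_name):
        # Called when the external event (network-vif-plugged-...) arrives.
        with self._lock:
            ev = self._waiters.pop((instance_uuid, event_name), None)
        if ev is None:
            # No one was waiting yet: the "Received unexpected event" case.
            print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
        else:
            ev.set()  # wake the spawning thread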
[ 946.275480] env[68040]: DEBUG nova.network.neutron [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Successfully updated port: eb72d5d3-6845-4a36-af2a-18556f4f14a1 {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 946.293083] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquiring lock "refresh_cache-bce68a2b-260c-45cc-ac98-d4b01b4513a4" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 946.293823] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquired lock "refresh_cache-bce68a2b-260c-45cc-ac98-d4b01b4513a4" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.293823] env[68040]: DEBUG nova.network.neutron [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 946.334547] env[68040]: DEBUG nova.network.neutron [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Instance cache missing network info. 
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 946.497048] env[68040]: DEBUG nova.network.neutron [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Updating instance_info_cache with network_info: [{"id": "eb72d5d3-6845-4a36-af2a-18556f4f14a1", "address": "fa:16:3e:80:c4:74", "network": {"id": "f36ad059-499c-46f8-a9bd-a005304fdc99", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1337412262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0276ed66cde4a96bb03a3005a30325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fa01fe1a-83b6-4c10-af75-00ddb17f9bbf", "external-id": "nsx-vlan-transportzone-431", "segmentation_id": 431, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb72d5d3-68", "ovs_interfaceid": "eb72d5d3-6845-4a36-af2a-18556f4f14a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.507694] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Releasing lock "refresh_cache-bce68a2b-260c-45cc-ac98-d4b01b4513a4" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 946.507967] env[68040]: DEBUG nova.compute.manager [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Instance network_info: |[{"id": "eb72d5d3-6845-4a36-af2a-18556f4f14a1", "address": "fa:16:3e:80:c4:74", "network": {"id": "f36ad059-499c-46f8-a9bd-a005304fdc99", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1337412262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0276ed66cde4a96bb03a3005a30325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fa01fe1a-83b6-4c10-af75-00ddb17f9bbf", "external-id": "nsx-vlan-transportzone-431", "segmentation_id": 431, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb72d5d3-68", "ovs_interfaceid": "eb72d5d3-6845-4a36-af2a-18556f4f14a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 946.508372] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:c4:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fa01fe1a-83b6-4c10-af75-00ddb17f9bbf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb72d5d3-6845-4a36-af2a-18556f4f14a1', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 946.515885] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Creating folder: Project (c0276ed66cde4a96bb03a3005a30325d). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 946.516417] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d5d4118-a946-47f0-8496-67e89398fcdd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.526783] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Created folder: Project (c0276ed66cde4a96bb03a3005a30325d) in parent group-v639956. [ 946.526962] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Creating folder: Instances. Parent ref: group-v640004. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 946.527187] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2d9686bd-9d1c-492b-b228-ad3d7cfd109c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.535517] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Created folder: Instances in parent group-v640004. [ 946.535733] env[68040]: DEBUG oslo.service.loopingcall [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 946.535906] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 946.536103] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9921bc43-a066-4255-9be9-eaf3d4e81dc8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.554059] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 946.554059] env[68040]: value = "task-3200215" [ 946.554059] env[68040]: _type = "Task" [ 946.554059] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 946.561119] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200215, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 947.063616] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200215, 'name': CreateVM_Task, 'duration_secs': 0.296999} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 947.063756] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 947.064601] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 947.064601] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 947.064869] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 947.065142] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbbf178e-5ec7-4de6-bcf0-15633585585b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 947.069629] env[68040]: DEBUG oslo_vmware.api [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Waiting for the task: (returnval){
[ 947.069629] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52629742-edf2-b747-03b5-b7bc4b60d397"
[ 947.069629] env[68040]: _type = "Task"
[ 947.069629] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 947.077101] env[68040]: DEBUG oslo_vmware.api [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52629742-edf2-b747-03b5-b7bc4b60d397, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
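The CreateVM_Task sequence above follows oslo.vmware's submit-then-poll contract: a vSphere *_Task method returns a task reference immediately, and wait_for_task() polls the TaskInfo at task_poll_interval (the "progress is 0%" then "completed successfully" lines) until the task succeeds or raises. Below is a hedged sketch of that pattern using oslo.vmware's public session API; the endpoint, credentials, and managed-object arguments are placeholders for illustration, not values taken from this log:

# Sketch, assuming a reachable vCenter; placeholder host and credentials.
from oslo_vmware import api

session = api.VMwareAPISession('vc1.example.com', 'administrator', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

def create_vm(session, folder_ref, config_spec, pool_ref):
    # Invoking a *_Task method returns a task reference at once;
    # wait_for_task() then polls TaskInfo and raises if the task errors.
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                              config=config_spec, pool=pool_ref)
    return session.wait_for_task(task)

Building a real config_spec/folder_ref/pool_ref is the bulk of Nova's vm_util code and is elided here.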
[ 947.581558] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 947.581811] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 947.582032] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 948.553220] env[68040]: DEBUG nova.compute.manager [req-89dc9a4c-c797-47ae-8a6e-08aa91ecb34d req-8d8db565-a0f0-4ea9-8a3b-c11d358e9eb3 service nova] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Received event network-changed-eb72d5d3-6845-4a36-af2a-18556f4f14a1 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 948.553267] env[68040]: DEBUG nova.compute.manager [req-89dc9a4c-c797-47ae-8a6e-08aa91ecb34d req-8d8db565-a0f0-4ea9-8a3b-c11d358e9eb3 service nova] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Refreshing instance network info cache due to event network-changed-eb72d5d3-6845-4a36-af2a-18556f4f14a1. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}}
[ 948.553480] env[68040]: DEBUG oslo_concurrency.lockutils [req-89dc9a4c-c797-47ae-8a6e-08aa91ecb34d req-8d8db565-a0f0-4ea9-8a3b-c11d358e9eb3 service nova] Acquiring lock "refresh_cache-bce68a2b-260c-45cc-ac98-d4b01b4513a4" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 948.553631] env[68040]: DEBUG oslo_concurrency.lockutils [req-89dc9a4c-c797-47ae-8a6e-08aa91ecb34d req-8d8db565-a0f0-4ea9-8a3b-c11d358e9eb3 service nova] Acquired lock "refresh_cache-bce68a2b-260c-45cc-ac98-d4b01b4513a4" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 948.553792] env[68040]: DEBUG nova.network.neutron [req-89dc9a4c-c797-47ae-8a6e-08aa91ecb34d req-8d8db565-a0f0-4ea9-8a3b-c11d358e9eb3 service nova] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Refreshing network info cache for port eb72d5d3-6845-4a36-af2a-18556f4f14a1 {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 948.807094] env[68040]: DEBUG nova.network.neutron [req-89dc9a4c-c797-47ae-8a6e-08aa91ecb34d req-8d8db565-a0f0-4ea9-8a3b-c11d358e9eb3 service nova] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Updated VIF entry in instance network info cache for port eb72d5d3-6845-4a36-af2a-18556f4f14a1.
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 948.807476] env[68040]: DEBUG nova.network.neutron [req-89dc9a4c-c797-47ae-8a6e-08aa91ecb34d req-8d8db565-a0f0-4ea9-8a3b-c11d358e9eb3 service nova] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Updating instance_info_cache with network_info: [{"id": "eb72d5d3-6845-4a36-af2a-18556f4f14a1", "address": "fa:16:3e:80:c4:74", "network": {"id": "f36ad059-499c-46f8-a9bd-a005304fdc99", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1337412262-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0276ed66cde4a96bb03a3005a30325d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fa01fe1a-83b6-4c10-af75-00ddb17f9bbf", "external-id": "nsx-vlan-transportzone-431", "segmentation_id": 431, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb72d5d3-68", "ovs_interfaceid": "eb72d5d3-6845-4a36-af2a-18556f4f14a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.816790] env[68040]: DEBUG oslo_concurrency.lockutils [req-89dc9a4c-c797-47ae-8a6e-08aa91ecb34d req-8d8db565-a0f0-4ea9-8a3b-c11d358e9eb3 service nova] Releasing lock "refresh_cache-bce68a2b-260c-45cc-ac98-d4b01b4513a4" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.020889] env[68040]: DEBUG oslo_concurrency.lockutils [None req-bc1322f3-5659-4e3d-b876-c8389db71213 tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquiring lock "bce68a2b-260c-45cc-ac98-d4b01b4513a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.984685] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 962.980124] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 963.005086] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 963.005086] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 963.005086] env[68040]: DEBUG 
nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 963.024903] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 963.025494] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 963.025657] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 963.025793] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 963.026509] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 963.026509] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 963.026509] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 963.026509] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 963.026509] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 963.026728] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 963.026728] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 963.027834] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 963.038277] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.038515] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.038687] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 963.038840] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 963.039902] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0907a74-2604-4035-b6b4-1682ec611ee2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.048741] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd6e13d-ca32-464d-b6c4-7163e339aa75 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.063242] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273b60ee-13cd-4461-9d28-bc6c7e7d04c9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.069506] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b54409-f2d3-4ab1-8fea-cbb2499e13a5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.099547] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180982MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 963.099709] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.099911] 
env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 963.171255] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3411cba3-71c9-4334-bc79-4e322f4231f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 963.171427] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 97b050ff-2997-4504-8787-04f1221251b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 963.171558] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b81d413c-2449-471a-b3d9-693fc0ab2824 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 963.171683] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 39de4e78-44cd-4582-998e-88ce6de2d51c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 963.171805] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance a89ff564-ea35-4000-8efa-2c1ec2b61759 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 963.171927] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 42f39352-e703-4ebf-9559-4c8b5abca70e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 963.172059] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d1819f29-a891-47dd-a456-8f3b127daf6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 963.172184] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1812f13e-b03d-48d4-940a-43974784265b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 963.172300] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance de1b8ef9-0088-4d2a-985e-d04fcff55d31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 963.172414] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance bce68a2b-260c-45cc-ac98-d4b01b4513a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 963.184020] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1e43f6be-f6a3-4569-adea-c82a5d709247 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 963.193883] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3738de32-79cd-4b04-8081-cc1146730c75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 963.203456] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b9c0cbae-d76a-4ec9-9cc8-727d011dc5f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 963.213200] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance a51e5779-42bf-4281-8c46-1dcc771382ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 963.223429] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f740b2a2-a3f5-45d7-913f-5455236a2620 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 963.232653] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 963.242517] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3f9438b9-bfe3-4a7f-bfec-e140fed5f66c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 963.251572] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e2cd4cbd-279b-4852-85b0-f78af45bf7e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 963.260772] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8ec0f6bd-4a3e-4e70-b310-714676607b9c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 963.270865] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 25298be1-8cc9-46fa-9b33-62425bcb91dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 963.279998] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 67756ba9-5738-4669-ace9-a3d2f1952dfa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 963.289922] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 940a6a43-d74e-419c-af5b-92c991e3649d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 963.298869] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1b4b422a-f096-4ed1-9d47-f150e7a3434f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 963.307842] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3e7cb203-0bad-49d0-83d4-b5a086c31ad6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}}
[ 963.308099] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 963.308253] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 963.577303] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6052754f-97e2-4a73-8926-5f78f05d833c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 963.585058] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fecbbe2b-60d9-44b9-9ec6-6d6987d27aaf {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 963.614831] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503daad9-e0e7-48f9-89ac-6a82789277ef {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 963.622167] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4ece7fa-2727-4940-8105-329049f6d827 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 963.637150] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 963.645338] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 963.661585] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 963.661942] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.562s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 964.618675] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 964.619088] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 964.619088] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 965.984378] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 966.980298] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 966.984056] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 966.984056] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
[ 991.832336] env[68040]: WARNING oslo_vmware.rw_handles [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 991.832336] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 991.832336] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 991.832336] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 991.832336] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 991.832336] env[68040]: ERROR oslo_vmware.rw_handles response.begin()
[ 991.832336] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 991.832336] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 991.832336] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 991.832336] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 991.832336] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 991.832336] env[68040]: ERROR oslo_vmware.rw_handles
[ 991.833045] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/2612fcc1-6828-4f3d-be83-ae1f64706579/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 991.834608] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 991.834876] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Copying Virtual Disk [datastore2] vmware_temp/2612fcc1-6828-4f3d-be83-ae1f64706579/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/2612fcc1-6828-4f3d-be83-ae1f64706579/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 991.835215] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6fe95cfd-c24c-4cda-a1c6-df581675163d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 991.845419] env[68040]: DEBUG oslo_vmware.api [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Waiting for the task: (returnval){
[ 991.845419] env[68040]: value = "task-3200216"
[ 991.845419] env[68040]: _type = "Task"
[ 991.845419] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 991.853022] env[68040]: DEBUG oslo_vmware.api [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Task: {'id': task-3200216, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 992.355906] env[68040]: DEBUG oslo_vmware.exceptions [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 992.356149] env[68040]: DEBUG oslo_concurrency.lockutils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 992.356836] env[68040]: ERROR nova.compute.manager [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 992.356836] env[68040]: Faults: ['InvalidArgument']
[ 992.356836] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Traceback (most recent call last):
[ 992.356836] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 992.356836] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] yield resources
[ 992.356836] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 992.356836] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] self.driver.spawn(context, instance, image_meta,
[ 992.356836] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 992.356836] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 992.356836] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 992.356836] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] self._fetch_image_if_missing(context, vi)
[ 992.356836] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 992.357363] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] image_cache(vi, tmp_image_ds_loc)
[ 992.357363] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 992.357363] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] vm_util.copy_virtual_disk(
[ 992.357363] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 992.357363] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] session._wait_for_task(vmdk_copy_task)
[ 992.357363] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 992.357363] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] return self.wait_for_task(task_ref)
[ 992.357363] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 992.357363] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] return evt.wait()
[ 992.357363] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 992.357363] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] result = hub.switch()
[ 992.357363] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 992.357363] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] return self.greenlet.switch()
[ 992.358061] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 992.358061] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] self.f(*self.args, **self.kw)
[ 992.358061] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 992.358061] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] raise exceptions.translate_fault(task_info.error)
[ 992.358061] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 992.358061] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Faults: ['InvalidArgument']
[ 992.358061] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1]
[ 992.358061] env[68040]: INFO nova.compute.manager [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Terminating instance
[ 992.359042] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
"[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.359235] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 992.359342] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a9294cc-58e7-4de8-ba69-0f549bf20728 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.361751] env[68040]: DEBUG nova.compute.manager [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 992.361949] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 992.362689] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b22dd429-3f72-4c88-88bd-3a68a0647549 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.370050] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 992.371235] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-65c1dc1a-2810-486a-8f57-a0711df1b517 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.372624] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 992.372798] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 992.373474] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c02bc825-513a-4497-b265-b4d5ee773caf {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.380318] env[68040]: DEBUG oslo_vmware.api [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Waiting for the task: (returnval){ [ 992.380318] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52fd3290-7217-74ca-1c2e-55b76bda65ec" [ 992.380318] env[68040]: _type = "Task" [ 992.380318] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.391711] env[68040]: DEBUG oslo_vmware.api [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52fd3290-7217-74ca-1c2e-55b76bda65ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.476024] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 992.476024] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 992.476024] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Deleting the datastore file [datastore2] 3411cba3-71c9-4334-bc79-4e322f4231f1 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 992.476722] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73df6fdd-0fb6-4228-88fb-58f0ff9e442f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.486050] env[68040]: DEBUG oslo_vmware.api [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Waiting for the task: (returnval){ [ 992.486050] env[68040]: value = "task-3200218" [ 992.486050] env[68040]: _type = "Task" [ 992.486050] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.494154] env[68040]: DEBUG oslo_vmware.api [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Task: {'id': task-3200218, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.891258] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 992.891680] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Creating directory with path [datastore2] vmware_temp/007e3852-c68c-4c62-9ed8-e7c52ae627cf/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 992.891904] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-805fc43d-8123-4dfc-8420-518b1e6421aa {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.902798] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Created directory with path [datastore2] vmware_temp/007e3852-c68c-4c62-9ed8-e7c52ae627cf/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 992.903052] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Fetch image to [datastore2] vmware_temp/007e3852-c68c-4c62-9ed8-e7c52ae627cf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 992.903289] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/007e3852-c68c-4c62-9ed8-e7c52ae627cf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 992.904114] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca5d207-4f12-4502-bc08-9eb8452796c3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.911290] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e3eb97-d195-40e0-9aa5-35940382a9e9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.919943] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a20aeee-3a3b-40bd-a133-13f52fd3c744 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.949165] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7b093c-3bad-4726-8e17-93af3079b26d {{(pid=68040) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.954425] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-247bcde4-7ab3-4d45-a97d-6e00f8595a00 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.973722] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 992.993102] env[68040]: DEBUG oslo_vmware.api [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Task: {'id': task-3200218, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089062} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.994983] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 992.995191] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 992.995366] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 992.995544] env[68040]: INFO nova.compute.manager [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Took 0.63 seconds to destroy the instance on the hypervisor. 
[ 992.997754] env[68040]: DEBUG nova.compute.claims [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 992.997919] env[68040]: DEBUG oslo_concurrency.lockutils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 992.998278] env[68040]: DEBUG oslo_concurrency.lockutils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 993.030769] env[68040]: DEBUG oslo_vmware.rw_handles [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/007e3852-c68c-4c62-9ed8-e7c52ae627cf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 993.093429] env[68040]: DEBUG oslo_vmware.rw_handles [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 993.093429] env[68040]: DEBUG oslo_vmware.rw_handles [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/007e3852-c68c-4c62-9ed8-e7c52ae627cf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 993.361077] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f77ad00-f8ca-42b4-b56a-d4d98b1c614c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 993.369925] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-772eb76f-f89b-470a-bbd5-163a68d43f4b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 993.399027] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6c715e-4391-44a0-b570-b315b93f1fa1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 993.406991] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add5e60a-e7be-41ce-840f-e347fe420901 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 993.420087] env[68040]: DEBUG nova.compute.provider_tree [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 993.429589] env[68040]: DEBUG nova.scheduler.client.report [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 993.445581] env[68040]: DEBUG oslo_concurrency.lockutils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.447s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 993.446218] env[68040]: ERROR nova.compute.manager [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 993.446218] env[68040]: Faults: ['InvalidArgument']
[ 993.446218] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Traceback (most recent call last):
[ 993.446218] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 993.446218] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] self.driver.spawn(context, instance, image_meta,
[ 993.446218] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 993.446218] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 993.446218] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 993.446218] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] self._fetch_image_if_missing(context, vi)
[ 993.446218] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 993.446218] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] image_cache(vi, tmp_image_ds_loc)
[ 993.446218] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 993.446611] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] vm_util.copy_virtual_disk(
[ 993.446611] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 993.446611] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] session._wait_for_task(vmdk_copy_task)
[ 993.446611] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 993.446611] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] return self.wait_for_task(task_ref)
[ 993.446611] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 993.446611] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] return evt.wait()
[ 993.446611] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 993.446611] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] result = hub.switch()
[ 993.446611] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 993.446611] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] return self.greenlet.switch()
[ 993.446611] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 993.446611] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] self.f(*self.args, **self.kw)
[ 993.447168] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 993.447168] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] raise exceptions.translate_fault(task_info.error)
[ 993.447168] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 993.447168] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Faults: ['InvalidArgument']
[ 993.447168] env[68040]: ERROR nova.compute.manager [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1]
[ 993.447168] env[68040]: DEBUG nova.compute.utils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 993.448410] env[68040]: DEBUG nova.compute.manager [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Build of instance 3411cba3-71c9-4334-bc79-4e322f4231f1 was re-scheduled: A specified parameter was not correct: fileType
[ 993.448410] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 993.448777] env[68040]: DEBUG nova.compute.manager [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 993.448959] env[68040]: DEBUG nova.compute.manager [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 993.449144] env[68040]: DEBUG nova.compute.manager [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 993.449313] env[68040]: DEBUG nova.network.neutron [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 993.771022] env[68040]: DEBUG nova.network.neutron [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 993.779562] env[68040]: INFO nova.compute.manager [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 3411cba3-71c9-4334-bc79-4e322f4231f1] Took 0.33 seconds to deallocate network for instance.
[ 993.885111] env[68040]: INFO nova.scheduler.client.report [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Deleted allocations for instance 3411cba3-71c9-4334-bc79-4e322f4231f1
[ 993.910507] env[68040]: DEBUG oslo_concurrency.lockutils [None req-876cbfc8-bf75-496f-9be0-3086779e6117 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Lock "3411cba3-71c9-4334-bc79-4e322f4231f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 338.139s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 993.924581] env[68040]: DEBUG nova.compute.manager [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 993.979157] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 993.979416] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 993.981050] env[68040]: INFO nova.compute.claims [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 994.297253] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59047c11-bace-4193-812b-5d29ba901912 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 994.304710] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612eac24-9d69-4d24-bf0b-81dd83cdc359 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 994.334326] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302090a1-ef40-47bb-a5aa-a4c6014fefec {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 994.341561] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb2cbb9-a09f-47f2-a302-967039f5b8cf {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 994.354418] env[68040]: DEBUG nova.compute.provider_tree [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 994.363464] env[68040]: DEBUG nova.scheduler.client.report [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 994.379651] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.400s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 994.380176] env[68040]: DEBUG nova.compute.manager [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 994.413036] env[68040]: DEBUG nova.compute.utils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 994.414206] env[68040]: DEBUG nova.compute.manager [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 994.414429] env[68040]: DEBUG nova.network.neutron [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 994.424496] env[68040]: DEBUG nova.compute.manager [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 994.469734] env[68040]: DEBUG nova.policy [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd5b784bb2384457e9bcc4e9ff02ea850', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9a2c3ee9bf1c40228a089e4b0e5bff00', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}}
[ 994.487083] env[68040]: DEBUG nova.compute.manager [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Start spawning the instance on the hypervisor. {{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
[ 994.512566] env[68040]: DEBUG nova.virt.hardware [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 994.512807] env[68040]: DEBUG nova.virt.hardware [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 994.512969] env[68040]: DEBUG nova.virt.hardware [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 994.513208] env[68040]: DEBUG nova.virt.hardware [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 994.513361] env[68040]: DEBUG nova.virt.hardware [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 994.513510] env[68040]: DEBUG nova.virt.hardware [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 994.513726] env[68040]: DEBUG nova.virt.hardware [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 994.513890] env[68040]: DEBUG nova.virt.hardware [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 994.514071] env[68040]: DEBUG nova.virt.hardware [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 994.514243] env[68040]: DEBUG nova.virt.hardware [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 994.514434] env[68040]: DEBUG nova.virt.hardware [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 994.515349] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5cbdc00-11de-4d0e-b97d-7e66bb90329b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 994.523412] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f5b709-6b01-47bc-ba0b-cbd7504a9706 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 994.776448] env[68040]: DEBUG nova.network.neutron [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Successfully created port: c955dfdf-4784-4f75-880e-af722cfc6d6c {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 995.394075] env[68040]: DEBUG nova.network.neutron [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Successfully updated port: c955dfdf-4784-4f75-880e-af722cfc6d6c {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 995.413208] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "refresh_cache-1e43f6be-f6a3-4569-adea-c82a5d709247" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 995.413369] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquired lock "refresh_cache-1e43f6be-f6a3-4569-adea-c82a5d709247" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 995.414478] env[68040]: DEBUG nova.network.neutron [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 995.454286] env[68040]: DEBUG nova.network.neutron [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 995.623264] env[68040]: DEBUG nova.network.neutron [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Updating instance_info_cache with network_info: [{"id": "c955dfdf-4784-4f75-880e-af722cfc6d6c", "address": "fa:16:3e:0e:4e:40", "network": {"id": "9565e3df-4a40-4611-a5a9-efd2bc66053b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-780365588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a2c3ee9bf1c40228a089e4b0e5bff00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc955dfdf-47", "ovs_interfaceid": "c955dfdf-4784-4f75-880e-af722cfc6d6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 995.636815] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Releasing lock "refresh_cache-1e43f6be-f6a3-4569-adea-c82a5d709247" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 995.637107] env[68040]: DEBUG nova.compute.manager [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Instance network_info: |[{"id": "c955dfdf-4784-4f75-880e-af722cfc6d6c", "address": "fa:16:3e:0e:4e:40", "network": {"id": "9565e3df-4a40-4611-a5a9-efd2bc66053b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-780365588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a2c3ee9bf1c40228a089e4b0e5bff00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc955dfdf-47", "ovs_interfaceid": "c955dfdf-4784-4f75-880e-af722cfc6d6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile":
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 995.637522] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:4e:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c955dfdf-4784-4f75-880e-af722cfc6d6c', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 995.646111] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Creating folder: Project (9a2c3ee9bf1c40228a089e4b0e5bff00). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 995.646739] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12eaaefd-3a71-464e-9b46-27c39a933922 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.658369] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Created folder: Project (9a2c3ee9bf1c40228a089e4b0e5bff00) in parent group-v639956. [ 995.658556] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Creating folder: Instances. Parent ref: group-v640007. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 995.658771] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-54636fc0-dd09-4031-b6c6-c303c9e0295f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.666690] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Created folder: Instances in parent group-v640007. [ 995.666921] env[68040]: DEBUG oslo.service.loopingcall [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 995.667111] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 995.667301] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e5dd1b5f-1a5d-4017-bafe-5c488d26a97a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.687816] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 995.687816] env[68040]: value = "task-3200221" [ 995.687816] env[68040]: _type = "Task" [ 995.687816] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.695739] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200221, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.023657] env[68040]: DEBUG nova.compute.manager [req-a347bf22-0c15-4f21-abbb-23f489ea2506 req-14c9617e-2ece-45a0-8654-fe89ffcb8efa service nova] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Received event network-vif-plugged-c955dfdf-4784-4f75-880e-af722cfc6d6c {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 996.023890] env[68040]: DEBUG oslo_concurrency.lockutils [req-a347bf22-0c15-4f21-abbb-23f489ea2506 req-14c9617e-2ece-45a0-8654-fe89ffcb8efa service nova] Acquiring lock "1e43f6be-f6a3-4569-adea-c82a5d709247-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.024284] env[68040]: DEBUG oslo_concurrency.lockutils [req-a347bf22-0c15-4f21-abbb-23f489ea2506 req-14c9617e-2ece-45a0-8654-fe89ffcb8efa service nova] Lock "1e43f6be-f6a3-4569-adea-c82a5d709247-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.024527] env[68040]: DEBUG oslo_concurrency.lockutils [req-a347bf22-0c15-4f21-abbb-23f489ea2506 req-14c9617e-2ece-45a0-8654-fe89ffcb8efa service nova] Lock "1e43f6be-f6a3-4569-adea-c82a5d709247-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.024740] env[68040]: DEBUG nova.compute.manager [req-a347bf22-0c15-4f21-abbb-23f489ea2506 req-14c9617e-2ece-45a0-8654-fe89ffcb8efa service nova] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] No waiting events found dispatching network-vif-plugged-c955dfdf-4784-4f75-880e-af722cfc6d6c {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 996.024995] env[68040]: WARNING nova.compute.manager [req-a347bf22-0c15-4f21-abbb-23f489ea2506 req-14c9617e-2ece-45a0-8654-fe89ffcb8efa service nova] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Received unexpected event network-vif-plugged-c955dfdf-4784-4f75-880e-af722cfc6d6c for instance with vm_state building and task_state spawning. 
[ 996.025139] env[68040]: DEBUG nova.compute.manager [req-a347bf22-0c15-4f21-abbb-23f489ea2506 req-14c9617e-2ece-45a0-8654-fe89ffcb8efa service nova] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Received event network-changed-c955dfdf-4784-4f75-880e-af722cfc6d6c {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 996.025321] env[68040]: DEBUG nova.compute.manager [req-a347bf22-0c15-4f21-abbb-23f489ea2506 req-14c9617e-2ece-45a0-8654-fe89ffcb8efa service nova] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Refreshing instance network info cache due to event network-changed-c955dfdf-4784-4f75-880e-af722cfc6d6c. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 996.025528] env[68040]: DEBUG oslo_concurrency.lockutils [req-a347bf22-0c15-4f21-abbb-23f489ea2506 req-14c9617e-2ece-45a0-8654-fe89ffcb8efa service nova] Acquiring lock "refresh_cache-1e43f6be-f6a3-4569-adea-c82a5d709247" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.025687] env[68040]: DEBUG oslo_concurrency.lockutils [req-a347bf22-0c15-4f21-abbb-23f489ea2506 req-14c9617e-2ece-45a0-8654-fe89ffcb8efa service nova] Acquired lock "refresh_cache-1e43f6be-f6a3-4569-adea-c82a5d709247" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.025860] env[68040]: DEBUG nova.network.neutron [req-a347bf22-0c15-4f21-abbb-23f489ea2506 req-14c9617e-2ece-45a0-8654-fe89ffcb8efa service nova] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Refreshing network info cache for port c955dfdf-4784-4f75-880e-af722cfc6d6c {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 996.198357] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200221, 'name': CreateVM_Task, 'duration_secs': 0.343091} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.200899] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 996.201584] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 996.201858] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.202174] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 996.202724] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-132014db-acab-4ae0-a4f6-aeb06feab046 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.207551] env[68040]: DEBUG oslo_vmware.api [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for the task: (returnval){ [ 996.207551] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]522416e3-1296-e07a-43cf-08cd118bb089" [ 996.207551] env[68040]: _type = "Task" [ 996.207551] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.215064] env[68040]: DEBUG oslo_vmware.api [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]522416e3-1296-e07a-43cf-08cd118bb089, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.295291] env[68040]: DEBUG nova.network.neutron [req-a347bf22-0c15-4f21-abbb-23f489ea2506 req-14c9617e-2ece-45a0-8654-fe89ffcb8efa service nova] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Updated VIF entry in instance network info cache for port c955dfdf-4784-4f75-880e-af722cfc6d6c. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 996.295637] env[68040]: DEBUG nova.network.neutron [req-a347bf22-0c15-4f21-abbb-23f489ea2506 req-14c9617e-2ece-45a0-8654-fe89ffcb8efa service nova] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Updating instance_info_cache with network_info: [{"id": "c955dfdf-4784-4f75-880e-af722cfc6d6c", "address": "fa:16:3e:0e:4e:40", "network": {"id": "9565e3df-4a40-4611-a5a9-efd2bc66053b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-780365588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a2c3ee9bf1c40228a089e4b0e5bff00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc955dfdf-47", "ovs_interfaceid": "c955dfdf-4784-4f75-880e-af722cfc6d6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.305672] env[68040]: DEBUG oslo_concurrency.lockutils [req-a347bf22-0c15-4f21-abbb-23f489ea2506 req-14c9617e-2ece-45a0-8654-fe89ffcb8efa service nova] Releasing lock "refresh_cache-1e43f6be-f6a3-4569-adea-c82a5d709247" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.719346] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 996.719645] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 996.719799] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1006.433129] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a65d0288-f65a-416c-9b9c-42018bb19f4a tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "1e43f6be-f6a3-4569-adea-c82a5d709247" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.984375] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1022.984375] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1022.997033] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.997033] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.997033] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.997033] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1022.997447] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25739d5e-9082-40be-860e-5c31e49a4678 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.006411] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1caa7982-a34c-40b4-a1cd-320f8f1d6a1f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.020740] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea3b030a-fef8-4eca-8dd2-c74966a2b3ee {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.027174] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d851a697-c424-4cf3-a73f-4ff212ea11c3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.055275] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180969MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1023.055460] env[68040]: DEBUG 
oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.055757] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.143699] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 97b050ff-2997-4504-8787-04f1221251b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.143876] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b81d413c-2449-471a-b3d9-693fc0ab2824 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.144013] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 39de4e78-44cd-4582-998e-88ce6de2d51c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.144145] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance a89ff564-ea35-4000-8efa-2c1ec2b61759 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.144266] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 42f39352-e703-4ebf-9559-4c8b5abca70e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.144384] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d1819f29-a891-47dd-a456-8f3b127daf6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.144500] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1812f13e-b03d-48d4-940a-43974784265b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.144615] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance de1b8ef9-0088-4d2a-985e-d04fcff55d31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.144736] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance bce68a2b-260c-45cc-ac98-d4b01b4513a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.144868] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1e43f6be-f6a3-4569-adea-c82a5d709247 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1023.155774] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3738de32-79cd-4b04-8081-cc1146730c75 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1023.165569] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b9c0cbae-d76a-4ec9-9cc8-727d011dc5f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1023.175803] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance a51e5779-42bf-4281-8c46-1dcc771382ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1023.185673] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f740b2a2-a3f5-45d7-913f-5455236a2620 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1023.196133] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1023.206096] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3f9438b9-bfe3-4a7f-bfec-e140fed5f66c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1023.215156] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e2cd4cbd-279b-4852-85b0-f78af45bf7e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1023.224940] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8ec0f6bd-4a3e-4e70-b310-714676607b9c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1023.233926] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 25298be1-8cc9-46fa-9b33-62425bcb91dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1023.243337] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 67756ba9-5738-4669-ace9-a3d2f1952dfa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1023.252709] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 940a6a43-d74e-419c-af5b-92c991e3649d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1023.263802] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1b4b422a-f096-4ed1-9d47-f150e7a3434f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1023.276902] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3e7cb203-0bad-49d0-83d4-b5a086c31ad6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1023.277168] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1023.277328] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1023.293497] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Refreshing inventories for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1023.307449] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Updating ProviderTree inventory for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1023.307639] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Updating inventory in ProviderTree for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1023.318677] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Refreshing 
aggregate associations for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44, aggregates: None {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1023.336249] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Refreshing trait associations for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1023.591513] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b82793-dd61-4777-8f27-1bedcc067184 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.599328] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-409e5a86-c25e-4b7e-a632-d79195182c88 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.628698] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d103a46-d6b0-4c8b-b55d-2477abc15695 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.635963] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-234f6ddc-47ca-44ca-83f7-b3f2c639d56f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.651144] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.659091] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1023.673532] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1023.674553] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.618s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.674025] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1024.984523] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1024.984726] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1024.984820] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1025.007917] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1025.008121] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1025.008260] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1025.008389] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1025.008512] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1025.008632] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1025.008750] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1025.008869] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Skipping network cache update for instance because it is Building. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1025.008986] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1025.009114] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1025.009232] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1025.983916] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1025.984323] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1025.984323] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1026.979571] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1027.984801] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1027.985109] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1042.975184] env[68040]: WARNING oslo_vmware.rw_handles [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1042.975184] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1042.975184] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1042.975184] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1042.975184] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1042.975184] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 1042.975184] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1042.975184] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1042.975184] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1042.975184] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1042.975184] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1042.975184] env[68040]: ERROR oslo_vmware.rw_handles [ 1042.975777] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/007e3852-c68c-4c62-9ed8-e7c52ae627cf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1042.977900] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1042.978274] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Copying Virtual Disk [datastore2] vmware_temp/007e3852-c68c-4c62-9ed8-e7c52ae627cf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/007e3852-c68c-4c62-9ed8-e7c52ae627cf/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1042.978670] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cfa1a39c-284d-4141-b52a-43224f9a3e25 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.988767] env[68040]: DEBUG oslo_vmware.api [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Waiting for the task: (returnval){ [ 
1042.988767] env[68040]: value = "task-3200222" [ 1042.988767] env[68040]: _type = "Task" [ 1042.988767] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.996918] env[68040]: DEBUG oslo_vmware.api [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Task: {'id': task-3200222, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.499354] env[68040]: DEBUG oslo_vmware.exceptions [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1043.499630] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1043.500251] env[68040]: ERROR nova.compute.manager [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1043.500251] env[68040]: Faults: ['InvalidArgument'] [ 1043.500251] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] Traceback (most recent call last): [ 1043.500251] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1043.500251] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] yield resources [ 1043.500251] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1043.500251] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] self.driver.spawn(context, instance, image_meta, [ 1043.500251] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1043.500251] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1043.500251] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1043.500251] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] self._fetch_image_if_missing(context, vi) [ 1043.500251] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1043.500750] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] 
image_cache(vi, tmp_image_ds_loc) [ 1043.500750] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1043.500750] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] vm_util.copy_virtual_disk( [ 1043.500750] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1043.500750] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] session._wait_for_task(vmdk_copy_task) [ 1043.500750] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1043.500750] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] return self.wait_for_task(task_ref) [ 1043.500750] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1043.500750] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] return evt.wait() [ 1043.500750] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1043.500750] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] result = hub.switch() [ 1043.500750] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1043.500750] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] return self.greenlet.switch() [ 1043.501652] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1043.501652] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] self.f(*self.args, **self.kw) [ 1043.501652] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1043.501652] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] raise exceptions.translate_fault(task_info.error) [ 1043.501652] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1043.501652] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] Faults: ['InvalidArgument'] [ 1043.501652] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] [ 1043.501652] env[68040]: INFO nova.compute.manager [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Terminating instance [ 1043.502153] env[68040]: DEBUG oslo_concurrency.lockutils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 
tempest-ImagesNegativeTestJSON-182694959-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.502362] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1043.502595] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ae55340-c8c1-4a8c-83eb-541083abb43e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.505028] env[68040]: DEBUG nova.compute.manager [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1043.505148] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1043.505881] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-106e308e-75d8-4383-b4af-fb7e4f770ef0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.512881] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1043.513117] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-63bde3ec-0bc4-41db-bcfc-6268fa7e0832 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.515225] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1043.515396] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1043.516366] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1a96dd3-545f-41e5-ac8e-c439a00cae55 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.521146] env[68040]: DEBUG oslo_vmware.api [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Waiting for the task: (returnval){ [ 1043.521146] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5271358d-9f89-5592-13d2-661607d48422" [ 1043.521146] env[68040]: _type = "Task" [ 1043.521146] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.532423] env[68040]: DEBUG oslo_vmware.api [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5271358d-9f89-5592-13d2-661607d48422, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.581038] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1043.581216] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1043.581405] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Deleting the datastore file [datastore2] 97b050ff-2997-4504-8787-04f1221251b8 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1043.581668] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7af46ce5-5860-4495-adc8-8dc1ddab8175 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.587739] env[68040]: DEBUG oslo_vmware.api [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Waiting for the task: (returnval){ [ 1043.587739] env[68040]: value = "task-3200224" [ 1043.587739] env[68040]: _type = "Task" [ 1043.587739] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.596118] env[68040]: DEBUG oslo_vmware.api [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Task: {'id': task-3200224, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.031990] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1044.032304] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Creating directory with path [datastore2] vmware_temp/4aa22323-a767-418a-9a35-c84083a05167/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1044.032436] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0adab5bd-1058-47bb-b643-fb0819973270 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.044520] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Created directory with path [datastore2] vmware_temp/4aa22323-a767-418a-9a35-c84083a05167/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1044.044717] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Fetch image to [datastore2] vmware_temp/4aa22323-a767-418a-9a35-c84083a05167/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1044.044895] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/4aa22323-a767-418a-9a35-c84083a05167/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1044.045667] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19644fe6-561b-435c-a87d-d77b9ec5c0a1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.052466] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a7ed38-0965-48dc-a431-2ba9694b7322 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.061361] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114160bb-8237-47cc-82d1-5f7c148c52c0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.095169] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07cde54b-0fab-4557-af1b-43003304b8fc 
{{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.102062] env[68040]: DEBUG oslo_vmware.api [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Task: {'id': task-3200224, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073345} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.103508] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1044.103702] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1044.103873] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1044.104057] env[68040]: INFO nova.compute.manager [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1044.105955] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-10587e87-81b6-4842-a308-3f43935ab953 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.107742] env[68040]: DEBUG nova.compute.claims [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1044.107917] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1044.108166] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1044.130430] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1044.184334] env[68040]: DEBUG oslo_vmware.rw_handles [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4aa22323-a767-418a-9a35-c84083a05167/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1044.251023] env[68040]: DEBUG oslo_vmware.rw_handles [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1044.251023] env[68040]: DEBUG oslo_vmware.rw_handles [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4aa22323-a767-418a-9a35-c84083a05167/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1044.459708] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf8e5b6-203b-465d-86ac-eec43f613daf {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.467251] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c679ca-287f-4b75-859c-5457ea26d4b1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.496768] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c72efec9-6a0c-4e44-8317-d66fd1bc6e30 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.504678] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbbc9430-fcab-4d01-b9ce-d2a0b2864161 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.518645] env[68040]: DEBUG nova.compute.provider_tree [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1044.527018] env[68040]: DEBUG nova.scheduler.client.report [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1044.540257] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.432s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.540765] env[68040]: ERROR nova.compute.manager [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1044.540765] env[68040]: Faults: ['InvalidArgument'] [ 1044.540765] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] Traceback (most recent call last): [ 1044.540765] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1044.540765] env[68040]: ERROR 
nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] self.driver.spawn(context, instance, image_meta, [ 1044.540765] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1044.540765] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1044.540765] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1044.540765] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] self._fetch_image_if_missing(context, vi) [ 1044.540765] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1044.540765] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] image_cache(vi, tmp_image_ds_loc) [ 1044.540765] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1044.541223] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] vm_util.copy_virtual_disk( [ 1044.541223] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1044.541223] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] session._wait_for_task(vmdk_copy_task) [ 1044.541223] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1044.541223] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] return self.wait_for_task(task_ref) [ 1044.541223] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1044.541223] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] return evt.wait() [ 1044.541223] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1044.541223] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] result = hub.switch() [ 1044.541223] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1044.541223] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] return self.greenlet.switch() [ 1044.541223] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1044.541223] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] self.f(*self.args, **self.kw) [ 1044.541695] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1044.541695] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] raise exceptions.translate_fault(task_info.error) [ 1044.541695] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1044.541695] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] Faults: ['InvalidArgument'] [ 1044.541695] env[68040]: ERROR nova.compute.manager [instance: 97b050ff-2997-4504-8787-04f1221251b8] [ 1044.541695] env[68040]: DEBUG nova.compute.utils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1044.542875] env[68040]: DEBUG nova.compute.manager [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Build of instance 97b050ff-2997-4504-8787-04f1221251b8 was re-scheduled: A specified parameter was not correct: fileType [ 1044.542875] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1044.543261] env[68040]: DEBUG nova.compute.manager [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1044.543458] env[68040]: DEBUG nova.compute.manager [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1044.543645] env[68040]: DEBUG nova.compute.manager [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1044.543811] env[68040]: DEBUG nova.network.neutron [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1045.050671] env[68040]: DEBUG nova.network.neutron [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.062387] env[68040]: INFO nova.compute.manager [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Took 0.52 seconds to deallocate network for instance. [ 1045.166175] env[68040]: INFO nova.scheduler.client.report [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Deleted allocations for instance 97b050ff-2997-4504-8787-04f1221251b8 [ 1045.185907] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d2b578bc-7d56-4e79-a7fa-7bfe06b96ac2 tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Lock "97b050ff-2997-4504-8787-04f1221251b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 386.064s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.187196] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e3dd839b-a190-4cc9-b60e-3a463a3a1bba tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Lock "97b050ff-2997-4504-8787-04f1221251b8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 187.385s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.187424] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e3dd839b-a190-4cc9-b60e-3a463a3a1bba tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Acquiring lock "97b050ff-2997-4504-8787-04f1221251b8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.187634] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e3dd839b-a190-4cc9-b60e-3a463a3a1bba tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Lock "97b050ff-2997-4504-8787-04f1221251b8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68040) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.187801] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e3dd839b-a190-4cc9-b60e-3a463a3a1bba tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Lock "97b050ff-2997-4504-8787-04f1221251b8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.189770] env[68040]: INFO nova.compute.manager [None req-e3dd839b-a190-4cc9-b60e-3a463a3a1bba tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Terminating instance [ 1045.191636] env[68040]: DEBUG nova.compute.manager [None req-e3dd839b-a190-4cc9-b60e-3a463a3a1bba tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1045.191773] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e3dd839b-a190-4cc9-b60e-3a463a3a1bba tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1045.192015] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c25bbabb-52ef-44e5-9965-301e5d5c5683 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.202373] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f25f8e-471c-4ac7-abd6-87d26435c108 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.213459] env[68040]: DEBUG nova.compute.manager [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1045.234088] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-e3dd839b-a190-4cc9-b60e-3a463a3a1bba tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 97b050ff-2997-4504-8787-04f1221251b8 could not be found. [ 1045.234306] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e3dd839b-a190-4cc9-b60e-3a463a3a1bba tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1045.234486] env[68040]: INFO nova.compute.manager [None req-e3dd839b-a190-4cc9-b60e-3a463a3a1bba tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1045.234740] env[68040]: DEBUG oslo.service.loopingcall [None req-e3dd839b-a190-4cc9-b60e-3a463a3a1bba tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1045.234965] env[68040]: DEBUG nova.compute.manager [-] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1045.235080] env[68040]: DEBUG nova.network.neutron [-] [instance: 97b050ff-2997-4504-8787-04f1221251b8] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1045.267226] env[68040]: DEBUG nova.network.neutron [-] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.267226] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1045.267226] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1045.268646] env[68040]: INFO nova.compute.claims [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1045.273138] env[68040]: INFO nova.compute.manager [-] [instance: 97b050ff-2997-4504-8787-04f1221251b8] Took 0.04 seconds to deallocate network for instance. 
[ 1045.379598] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e3dd839b-a190-4cc9-b60e-3a463a3a1bba tempest-ServersAdminTestJSON-1902622098 tempest-ServersAdminTestJSON-1902622098-project-member] Lock "97b050ff-2997-4504-8787-04f1221251b8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.192s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.621668] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d35a200-b969-4972-b635-9fb83edbf082 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.629381] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6470cc27-ed5c-4378-8e2d-0724f5c44134 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.664183] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e1b223-06cd-41ab-bfb4-1082e376f0c7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.671957] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2724a18d-b464-43b6-970e-de91133908d5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.684932] env[68040]: DEBUG nova.compute.provider_tree [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1045.698088] env[68040]: DEBUG nova.scheduler.client.report [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1045.714998] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.448s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1045.715532] env[68040]: DEBUG nova.compute.manager [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Start building networks asynchronously for instance. 
{{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1045.753679] env[68040]: DEBUG nova.compute.utils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1045.755043] env[68040]: DEBUG nova.compute.manager [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Not allocating networking since 'none' was specified. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 1045.764165] env[68040]: DEBUG nova.compute.manager [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1045.826460] env[68040]: DEBUG nova.compute.manager [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Start spawning the instance on the hypervisor. {{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1045.853091] env[68040]: DEBUG nova.virt.hardware [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1045.853343] env[68040]: DEBUG nova.virt.hardware [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1045.853502] env[68040]: DEBUG nova.virt.hardware [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1045.853682] env[68040]: DEBUG nova.virt.hardware [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1045.853830] env[68040]: DEBUG nova.virt.hardware [None 
req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1045.853979] env[68040]: DEBUG nova.virt.hardware [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1045.854427] env[68040]: DEBUG nova.virt.hardware [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1045.854641] env[68040]: DEBUG nova.virt.hardware [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1045.854846] env[68040]: DEBUG nova.virt.hardware [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1045.855055] env[68040]: DEBUG nova.virt.hardware [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1045.855242] env[68040]: DEBUG nova.virt.hardware [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1045.856105] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80339a43-e2ae-4d4a-9695-dcac081cd756 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.866388] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb5de83-d5b0-4cf1-ad11-4b29d4750b00 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.880859] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Instance VIF info [] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1045.886256] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Creating folder: Project (7876e56ccbee4e10a9fed143f766ed40). Parent ref: group-v639956. 
{{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1045.886514] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3cd3dc74-a000-4036-a4f9-1789a6bb6aba {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.897306] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Created folder: Project (7876e56ccbee4e10a9fed143f766ed40) in parent group-v639956. [ 1045.897492] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Creating folder: Instances. Parent ref: group-v640010. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1045.897706] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7f596f42-4387-417a-a0c1-d439ccef9566 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.906535] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Created folder: Instances in parent group-v640010. [ 1045.906770] env[68040]: DEBUG oslo.service.loopingcall [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1045.906955] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1045.907195] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c9c587d-d90e-4589-86ab-08715028085e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.923190] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1045.923190] env[68040]: value = "task-3200227" [ 1045.923190] env[68040]: _type = "Task" [ 1045.923190] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.930487] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200227, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.433879] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200227, 'name': CreateVM_Task, 'duration_secs': 0.259746} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.434280] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1046.434722] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1046.434932] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.435270] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1046.435519] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4db80453-5f61-4ab6-b5b1-b3bf0991cc18 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.440260] env[68040]: DEBUG oslo_vmware.api [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Waiting for the task: (returnval){ [ 1046.440260] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5204afa4-be9b-3f9e-b344-9bdf06622364" [ 1046.440260] env[68040]: _type = "Task" [ 1046.440260] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.447734] env[68040]: DEBUG oslo_vmware.api [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5204afa4-be9b-3f9e-b344-9bdf06622364, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.956121] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1046.956121] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1046.956121] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1048.842389] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Acquiring lock "3738de32-79cd-4b04-8081-cc1146730c75" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.517236] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Acquiring lock "b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1056.517564] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Lock "b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.613182] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Acquiring lock "e92b662c-b458-49d8-ac2a-00ae6046a11b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.613182] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Lock "e92b662c-b458-49d8-ac2a-00ae6046a11b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1070.501756] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Acquiring lock "57cd94c2-aec3-427e-9b9f-a444fe291974" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1070.502039] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Lock "57cd94c2-aec3-427e-9b9f-a444fe291974" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.654985] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Acquiring lock "87a7851e-d6fe-481a-8abb-5732e281cb64" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.655364] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Lock "87a7851e-d6fe-481a-8abb-5732e281cb64" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1076.432238] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8f5a77d5-0a71-4401-b529-3c7fd1e6caca tempest-ServersTestJSON-1745355635 tempest-ServersTestJSON-1745355635-project-member] Acquiring lock "bd14d08b-d71a-43b0-b72a-6504dc0e2142" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.432538] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8f5a77d5-0a71-4401-b529-3c7fd1e6caca tempest-ServersTestJSON-1745355635 tempest-ServersTestJSON-1745355635-project-member] Lock "bd14d08b-d71a-43b0-b72a-6504dc0e2142" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.580644] env[68040]: DEBUG oslo_concurrency.lockutils [None req-618f0b27-75aa-434c-a2b6-3b8e177fbe65 tempest-AttachInterfacesTestJSON-1449330779 tempest-AttachInterfacesTestJSON-1449330779-project-member] Acquiring lock "00305041-c0c0-4b7b-9149-9bcba4392279" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1082.581019] env[68040]: DEBUG oslo_concurrency.lockutils [None req-618f0b27-75aa-434c-a2b6-3b8e177fbe65 
tempest-AttachInterfacesTestJSON-1449330779 tempest-AttachInterfacesTestJSON-1449330779-project-member] Lock "00305041-c0c0-4b7b-9149-9bcba4392279" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.556699] env[68040]: DEBUG oslo_concurrency.lockutils [None req-0be19fd9-696b-49de-bdcc-2dd65ee2b98f tempest-ServerRescueTestJSONUnderV235-1504598290 tempest-ServerRescueTestJSONUnderV235-1504598290-project-member] Acquiring lock "6011cb9b-e882-4eb2-96b4-82a43585acbc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.556933] env[68040]: DEBUG oslo_concurrency.lockutils [None req-0be19fd9-696b-49de-bdcc-2dd65ee2b98f tempest-ServerRescueTestJSONUnderV235-1504598290 tempest-ServerRescueTestJSONUnderV235-1504598290-project-member] Lock "6011cb9b-e882-4eb2-96b4-82a43585acbc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.984609] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1083.984609] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1083.997762] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1083.997952] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1083.998189] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.998386] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1083.999895] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8732bb6-cbe6-4763-9d0b-d200a1c32505 {{(pid=68040) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.009098] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50702d5-6c66-4282-9874-555ce1d9f369 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.024447] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d119fe0a-4b15-401b-a826-21498f83e0dc {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.033840] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8d700a-872a-4477-9ed3-5e2c0ef08327 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.064891] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180956MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1084.065034] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1084.065255] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.152120] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b81d413c-2449-471a-b3d9-693fc0ab2824 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.152414] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 39de4e78-44cd-4582-998e-88ce6de2d51c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.152454] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance a89ff564-ea35-4000-8efa-2c1ec2b61759 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.152552] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 42f39352-e703-4ebf-9559-4c8b5abca70e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.152696] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d1819f29-a891-47dd-a456-8f3b127daf6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.152808] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1812f13e-b03d-48d4-940a-43974784265b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.152907] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance de1b8ef9-0088-4d2a-985e-d04fcff55d31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.153037] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance bce68a2b-260c-45cc-ac98-d4b01b4513a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.153158] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1e43f6be-f6a3-4569-adea-c82a5d709247 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.153950] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3738de32-79cd-4b04-8081-cc1146730c75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1084.169120] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f740b2a2-a3f5-45d7-913f-5455236a2620 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1084.180888] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1084.195453] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3f9438b9-bfe3-4a7f-bfec-e140fed5f66c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1084.206640] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e2cd4cbd-279b-4852-85b0-f78af45bf7e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1084.219483] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8ec0f6bd-4a3e-4e70-b310-714676607b9c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1084.230979] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 25298be1-8cc9-46fa-9b33-62425bcb91dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1084.241534] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 67756ba9-5738-4669-ace9-a3d2f1952dfa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1084.257236] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 940a6a43-d74e-419c-af5b-92c991e3649d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1084.270923] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1b4b422a-f096-4ed1-9d47-f150e7a3434f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1084.281778] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3e7cb203-0bad-49d0-83d4-b5a086c31ad6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1084.293790] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1084.307296] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e92b662c-b458-49d8-ac2a-00ae6046a11b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1084.321531] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 57cd94c2-aec3-427e-9b9f-a444fe291974 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1084.334659] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 87a7851e-d6fe-481a-8abb-5732e281cb64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1084.347902] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance bd14d08b-d71a-43b0-b72a-6504dc0e2142 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1084.359793] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 00305041-c0c0-4b7b-9149-9bcba4392279 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1084.379055] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 6011cb9b-e882-4eb2-96b4-82a43585acbc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1084.379055] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1084.379055] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1084.786549] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546693d0-f242-442c-a58e-56048881abc3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.794836] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64493168-193b-4882-b5e4-9ea6268b52ed {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.828670] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d26a2f1-d302-4d49-99d4-e76e132b01e1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.836441] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034020ad-cf53-402c-9371-bee8acc97401 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.849706] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1084.858028] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1084.872706] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1084.872945] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.808s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.743679] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3fb08628-7aa9-4881-bc32-83c863bc6352 tempest-ServersV294TestFqdnHostnames-341872804 tempest-ServersV294TestFqdnHostnames-341872804-project-member] Acquiring lock "81cfab4f-6a32-42b0-bbfc-45596bc9ad4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1085.743982] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3fb08628-7aa9-4881-bc32-83c863bc6352 tempest-ServersV294TestFqdnHostnames-341872804 tempest-ServersV294TestFqdnHostnames-341872804-project-member] Lock "81cfab4f-6a32-42b0-bbfc-45596bc9ad4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1085.876626] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1085.984238] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1085.984419] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1085.984543] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1086.008968] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1086.008968] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1086.008968] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Skipping network cache update for instance because it is Building. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1086.008968] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1086.008968] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1086.009168] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1086.009236] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1086.009357] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1086.009475] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1086.009593] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1086.009713] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1086.010506] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1086.983079] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1087.979452] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1088.001795] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.001815] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.983846] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1089.984034] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1091.233203] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a193e22c-3fc1-4312-b71e-31f856aa9166 tempest-VolumesAdminNegativeTest-1864683811 tempest-VolumesAdminNegativeTest-1864683811-project-member] Acquiring lock "f6edf79d-5eff-4e2c-94d2-aa5cf1731748" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.233448] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a193e22c-3fc1-4312-b71e-31f856aa9166 tempest-VolumesAdminNegativeTest-1864683811 tempest-VolumesAdminNegativeTest-1864683811-project-member] Lock "f6edf79d-5eff-4e2c-94d2-aa5cf1731748" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.866954] env[68040]: WARNING oslo_vmware.rw_handles [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1091.866954] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1091.866954] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1091.866954] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1091.866954] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1091.866954] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 1091.866954] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1091.866954] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1091.866954] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1091.866954] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1091.866954] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1091.866954] env[68040]: ERROR oslo_vmware.rw_handles [ 1091.867385] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/4aa22323-a767-418a-9a35-c84083a05167/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1091.869348] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1091.869596] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-95c452ae-19d5-43c9-affb-c145dcbe83db 
tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Copying Virtual Disk [datastore2] vmware_temp/4aa22323-a767-418a-9a35-c84083a05167/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/4aa22323-a767-418a-9a35-c84083a05167/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1091.869877] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-424b8fdc-affc-4f71-a27f-6d818afc1644 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.877890] env[68040]: DEBUG oslo_vmware.api [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Waiting for the task: (returnval){ [ 1091.877890] env[68040]: value = "task-3200228" [ 1091.877890] env[68040]: _type = "Task" [ 1091.877890] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.886064] env[68040]: DEBUG oslo_vmware.api [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Task: {'id': task-3200228, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.388666] env[68040]: DEBUG oslo_vmware.exceptions [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Fault InvalidArgument not matched. 
{{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1092.388666] env[68040]: DEBUG oslo_concurrency.lockutils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1092.389242] env[68040]: ERROR nova.compute.manager [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1092.389242] env[68040]: Faults: ['InvalidArgument'] [ 1092.389242] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Traceback (most recent call last): [ 1092.389242] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1092.389242] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] yield resources [ 1092.389242] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1092.389242] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] self.driver.spawn(context, instance, image_meta, [ 1092.389242] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1092.389242] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1092.389242] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1092.389242] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] self._fetch_image_if_missing(context, vi) [ 1092.389242] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1092.389544] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] image_cache(vi, tmp_image_ds_loc) [ 1092.389544] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1092.389544] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] vm_util.copy_virtual_disk( [ 1092.389544] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1092.389544] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] session._wait_for_task(vmdk_copy_task) [ 1092.389544] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1092.389544] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] return self.wait_for_task(task_ref) [ 1092.389544] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1092.389544] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] return evt.wait() [ 1092.389544] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1092.389544] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] result = hub.switch() [ 1092.389544] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1092.389544] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] return self.greenlet.switch() [ 1092.389829] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1092.389829] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] self.f(*self.args, **self.kw) [ 1092.389829] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1092.389829] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] raise exceptions.translate_fault(task_info.error) [ 1092.389829] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1092.389829] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Faults: ['InvalidArgument'] [ 1092.389829] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] [ 1092.389829] env[68040]: INFO nova.compute.manager [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Terminating instance [ 1092.391849] env[68040]: DEBUG nova.compute.manager [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Start destroying the instance on the hypervisor. 
{{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1092.392054] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1092.392349] env[68040]: DEBUG oslo_concurrency.lockutils [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.392544] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1092.393314] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4863637a-9e03-4e06-9705-ae68a7c37a14 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.398029] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aa5238c2-c201-4790-b8de-dd6a9d8bc6da {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.401883] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1092.402200] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d39c996-dc0f-4523-8578-e210eae73091 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.404580] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1092.404749] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1092.405516] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a0cc419-8068-4594-a8dd-23f2d375cacd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.410085] env[68040]: DEBUG oslo_vmware.api [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Waiting for the task: (returnval){ [ 1092.410085] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]522a978f-e66a-48bf-2060-8f19e02035fa" [ 1092.410085] env[68040]: _type = "Task" [ 1092.410085] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.421452] env[68040]: DEBUG oslo_vmware.api [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]522a978f-e66a-48bf-2060-8f19e02035fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.473427] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1092.473661] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1092.473840] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Deleting the datastore file [datastore2] b81d413c-2449-471a-b3d9-693fc0ab2824 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1092.474131] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44e9dcb3-d750-46b6-8bd0-ebfbf955b444 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.480840] env[68040]: DEBUG oslo_vmware.api [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Waiting for the task: (returnval){ [ 1092.480840] env[68040]: value = "task-3200230" [ 1092.480840] env[68040]: _type = "Task" [ 1092.480840] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.489030] env[68040]: DEBUG oslo_vmware.api [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Task: {'id': task-3200230, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.921941] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1092.922234] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Creating directory with path [datastore2] vmware_temp/4c34450d-522f-4092-bfe4-317988d2df8c/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1092.922472] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae749280-0b38-46a0-9b92-3fd15a1551e7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.934829] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Created directory with path [datastore2] vmware_temp/4c34450d-522f-4092-bfe4-317988d2df8c/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1092.934959] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Fetch image to [datastore2] vmware_temp/4c34450d-522f-4092-bfe4-317988d2df8c/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1092.935149] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/4c34450d-522f-4092-bfe4-317988d2df8c/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1092.935901] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d859bb-4517-434b-bcf7-c4ea4668befb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.942375] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8b6fcc-17e1-41e4-a123-61f0098c6b40 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.951431] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa87f549-ab91-4200-9677-1ce1059f8f40 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.984625] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7faf3f7-05c5-4d88-8db2-4d4583cfe3b3 {{(pid=68040) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.993162] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-75e95b3f-e803-4e82-a7d8-129c3e48552f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.994754] env[68040]: DEBUG oslo_vmware.api [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Task: {'id': task-3200230, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068396} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.994986] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1092.995181] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1092.995399] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1092.995579] env[68040]: INFO nova.compute.manager [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1092.997705] env[68040]: DEBUG nova.compute.claims [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1092.997905] env[68040]: DEBUG oslo_concurrency.lockutils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.998120] env[68040]: DEBUG oslo_concurrency.lockutils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.021611] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1093.078070] env[68040]: DEBUG oslo_vmware.rw_handles [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4c34450d-522f-4092-bfe4-317988d2df8c/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1093.141642] env[68040]: DEBUG oslo_vmware.rw_handles [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1093.141843] env[68040]: DEBUG oslo_vmware.rw_handles [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4c34450d-522f-4092-bfe4-317988d2df8c/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1093.445429] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e2bde7-7c47-4a54-9643-313407a8565e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.456107] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c523b90-c5b9-4a77-8e01-41080105fbc2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.492784] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf108b9-6ee7-4b17-96f8-1b58b40491e8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.499868] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df759078-8415-4005-a547-634dee098379 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.512566] env[68040]: DEBUG nova.compute.provider_tree [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1093.521178] env[68040]: DEBUG nova.scheduler.client.report [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1093.541999] env[68040]: DEBUG oslo_concurrency.lockutils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.544s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.542888] env[68040]: ERROR nova.compute.manager [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1093.542888] env[68040]: Faults: ['InvalidArgument'] [ 1093.542888] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Traceback (most recent call last): [ 1093.542888] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1093.542888] env[68040]: ERROR 
nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] self.driver.spawn(context, instance, image_meta, [ 1093.542888] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1093.542888] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1093.542888] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1093.542888] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] self._fetch_image_if_missing(context, vi) [ 1093.542888] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1093.542888] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] image_cache(vi, tmp_image_ds_loc) [ 1093.542888] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1093.543215] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] vm_util.copy_virtual_disk( [ 1093.543215] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1093.543215] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] session._wait_for_task(vmdk_copy_task) [ 1093.543215] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1093.543215] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] return self.wait_for_task(task_ref) [ 1093.543215] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1093.543215] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] return evt.wait() [ 1093.543215] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1093.543215] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] result = hub.switch() [ 1093.543215] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1093.543215] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] return self.greenlet.switch() [ 1093.543215] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1093.543215] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] self.f(*self.args, **self.kw) [ 1093.543498] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1093.543498] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] raise exceptions.translate_fault(task_info.error) [ 1093.543498] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1093.543498] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Faults: ['InvalidArgument'] [ 1093.543498] env[68040]: ERROR nova.compute.manager [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] [ 1093.543977] env[68040]: DEBUG nova.compute.utils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1093.546120] env[68040]: DEBUG nova.compute.manager [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Build of instance b81d413c-2449-471a-b3d9-693fc0ab2824 was re-scheduled: A specified parameter was not correct: fileType [ 1093.546120] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1093.546718] env[68040]: DEBUG nova.compute.manager [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1093.547031] env[68040]: DEBUG nova.compute.manager [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1093.547370] env[68040]: DEBUG nova.compute.manager [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1093.547660] env[68040]: DEBUG nova.network.neutron [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1093.855932] env[68040]: DEBUG nova.network.neutron [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.869523] env[68040]: INFO nova.compute.manager [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Took 0.32 seconds to deallocate network for instance. [ 1093.975917] env[68040]: INFO nova.scheduler.client.report [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Deleted allocations for instance b81d413c-2449-471a-b3d9-693fc0ab2824 [ 1093.999884] env[68040]: DEBUG oslo_concurrency.lockutils [None req-95c452ae-19d5-43c9-affb-c145dcbe83db tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Lock "b81d413c-2449-471a-b3d9-693fc0ab2824" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 432.337s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.999884] env[68040]: DEBUG oslo_concurrency.lockutils [None req-61027daf-3ee3-4eb9-9bdd-3295b1dbcef6 tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Lock "b81d413c-2449-471a-b3d9-693fc0ab2824" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 233.756s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.999884] env[68040]: DEBUG oslo_concurrency.lockutils [None req-61027daf-3ee3-4eb9-9bdd-3295b1dbcef6 tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Acquiring lock "b81d413c-2449-471a-b3d9-693fc0ab2824-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.000136] env[68040]: DEBUG oslo_concurrency.lockutils [None req-61027daf-3ee3-4eb9-9bdd-3295b1dbcef6 tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Lock "b81d413c-2449-471a-b3d9-693fc0ab2824-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1094.000136] env[68040]: DEBUG oslo_concurrency.lockutils [None req-61027daf-3ee3-4eb9-9bdd-3295b1dbcef6 tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Lock "b81d413c-2449-471a-b3d9-693fc0ab2824-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.003101] env[68040]: INFO nova.compute.manager [None req-61027daf-3ee3-4eb9-9bdd-3295b1dbcef6 tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Terminating instance [ 1094.004639] env[68040]: DEBUG nova.compute.manager [None req-61027daf-3ee3-4eb9-9bdd-3295b1dbcef6 tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1094.004840] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-61027daf-3ee3-4eb9-9bdd-3295b1dbcef6 tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1094.005352] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c1b3993d-7084-41e4-84ca-2c5d85a3ccff {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.015133] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa60197-07d2-42c5-bca8-5d3555de0773 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.025852] env[68040]: DEBUG nova.compute.manager [None req-483c24ac-b097-4ee1-a0b8-1614fd992d11 tempest-ServersAaction247Test-113467702 tempest-ServersAaction247Test-113467702-project-member] [instance: b9c0cbae-d76a-4ec9-9cc8-727d011dc5f1] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1094.045910] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-61027daf-3ee3-4eb9-9bdd-3295b1dbcef6 tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b81d413c-2449-471a-b3d9-693fc0ab2824 could not be found. [ 1094.046132] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-61027daf-3ee3-4eb9-9bdd-3295b1dbcef6 tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1094.046317] env[68040]: INFO nova.compute.manager [None req-61027daf-3ee3-4eb9-9bdd-3295b1dbcef6 tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Took 0.04 seconds to destroy the instance on the hypervisor. 
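The lock records above trace Nova's per-instance serialization: _locked_do_build_and_run_instance held the instance lock for 432.337s, do_terminate_instance waited 233.756s for that same lock before terminating, and the short-lived "-events" lock was acquired and released in well under a millisecond. A minimal sketch of the oslo.concurrency pattern that emits these "acquired ... waited" / "released ... held" lines follows; the uuid is taken from the log, the function bodies are placeholders, and this is illustrative rather than Nova source:

    # Sketch only: the "inner" wrapper in oslo_concurrency.lockutils logs
    # the acquire/wait/release timings seen above around code like this.
    from oslo_concurrency import lockutils

    INSTANCE_UUID = 'b81d413c-2449-471a-b3d9-693fc0ab2824'  # from the log

    @lockutils.synchronized(INSTANCE_UUID)
    def do_terminate_instance():
        # Serialized against the build path holding the same lock name;
        # the 233.756s "waited" above is that contention being resolved.
        pass

    def clear_events_for_instance():
        # Short critical section; "held 0.000s" in the log.
        with lockutils.lock(INSTANCE_UUID + '-events'):
            pass

    do_terminate_instance()
    clear_events_for_instance()
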
[ 1094.046563] env[68040]: DEBUG oslo.service.loopingcall [None req-61027daf-3ee3-4eb9-9bdd-3295b1dbcef6 tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1094.046771] env[68040]: DEBUG nova.compute.manager [-] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1094.046865] env[68040]: DEBUG nova.network.neutron [-] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1094.053642] env[68040]: DEBUG nova.compute.manager [None req-483c24ac-b097-4ee1-a0b8-1614fd992d11 tempest-ServersAaction247Test-113467702 tempest-ServersAaction247Test-113467702-project-member] [instance: b9c0cbae-d76a-4ec9-9cc8-727d011dc5f1] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1094.070893] env[68040]: DEBUG nova.network.neutron [-] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.073527] env[68040]: DEBUG oslo_concurrency.lockutils [None req-483c24ac-b097-4ee1-a0b8-1614fd992d11 tempest-ServersAaction247Test-113467702 tempest-ServersAaction247Test-113467702-project-member] Lock "b9c0cbae-d76a-4ec9-9cc8-727d011dc5f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.445s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.078370] env[68040]: INFO nova.compute.manager [-] [instance: b81d413c-2449-471a-b3d9-693fc0ab2824] Took 0.03 seconds to deallocate network for instance. [ 1094.083154] env[68040]: DEBUG nova.compute.manager [None req-3e4dd475-92a0-4f4a-a305-e554b2311927 tempest-AttachInterfacesTestJSON-1449330779 tempest-AttachInterfacesTestJSON-1449330779-project-member] [instance: a51e5779-42bf-4281-8c46-1dcc771382ae] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1094.107565] env[68040]: DEBUG nova.compute.manager [None req-3e4dd475-92a0-4f4a-a305-e554b2311927 tempest-AttachInterfacesTestJSON-1449330779 tempest-AttachInterfacesTestJSON-1449330779-project-member] [instance: a51e5779-42bf-4281-8c46-1dcc771382ae] Instance disappeared before build. 
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1094.125954] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3e4dd475-92a0-4f4a-a305-e554b2311927 tempest-AttachInterfacesTestJSON-1449330779 tempest-AttachInterfacesTestJSON-1449330779-project-member] Lock "a51e5779-42bf-4281-8c46-1dcc771382ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.281s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.138408] env[68040]: DEBUG nova.compute.manager [None req-aeafd005-940e-4864-ba05-ef16240ef2ee tempest-VolumesAdminNegativeTest-1864683811 tempest-VolumesAdminNegativeTest-1864683811-project-member] [instance: f740b2a2-a3f5-45d7-913f-5455236a2620] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1094.166141] env[68040]: DEBUG nova.compute.manager [None req-aeafd005-940e-4864-ba05-ef16240ef2ee tempest-VolumesAdminNegativeTest-1864683811 tempest-VolumesAdminNegativeTest-1864683811-project-member] [instance: f740b2a2-a3f5-45d7-913f-5455236a2620] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1094.185113] env[68040]: DEBUG oslo_concurrency.lockutils [None req-61027daf-3ee3-4eb9-9bdd-3295b1dbcef6 tempest-ImagesNegativeTestJSON-182694959 tempest-ImagesNegativeTestJSON-182694959-project-member] Lock "b81d413c-2449-471a-b3d9-693fc0ab2824" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.186s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.192568] env[68040]: DEBUG oslo_concurrency.lockutils [None req-aeafd005-940e-4864-ba05-ef16240ef2ee tempest-VolumesAdminNegativeTest-1864683811 tempest-VolumesAdminNegativeTest-1864683811-project-member] Lock "f740b2a2-a3f5-45d7-913f-5455236a2620" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.317s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.206451] env[68040]: DEBUG nova.compute.manager [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Starting instance... 
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1094.252044] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1094.252377] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1094.253776] env[68040]: INFO nova.compute.claims [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1094.608092] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c25603ca-6452-4269-9ce0-e6d4469e2db4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.616246] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e74fc21d-1fe0-422f-886f-8625c74f0e5e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.647965] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17ba01d-493b-4869-9f04-532e32a8d5b0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.655078] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-308eea93-83f9-42ab-9259-5c45c697b1ac {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.668464] env[68040]: DEBUG nova.compute.provider_tree [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1094.679739] env[68040]: DEBUG nova.scheduler.client.report [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1094.698564] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 
tempest-ServersTestJSON-1941559996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.446s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.699087] env[68040]: DEBUG nova.compute.manager [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1094.734408] env[68040]: DEBUG nova.compute.utils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1094.736009] env[68040]: DEBUG nova.compute.manager [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1094.736107] env[68040]: DEBUG nova.network.neutron [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1094.745913] env[68040]: DEBUG nova.compute.manager [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1094.797810] env[68040]: DEBUG nova.policy [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c8e66b0d9ada4cabbb8efd2e8340a3a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52dbf578e94a4db7af130703ad4eb741', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 1094.812476] env[68040]: DEBUG nova.compute.manager [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Start spawning the instance on the hypervisor. 
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1094.840087] env[68040]: DEBUG nova.virt.hardware [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1094.840354] env[68040]: DEBUG nova.virt.hardware [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1094.840515] env[68040]: DEBUG nova.virt.hardware [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1094.840698] env[68040]: DEBUG nova.virt.hardware [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1094.840846] env[68040]: DEBUG nova.virt.hardware [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1094.840993] env[68040]: DEBUG nova.virt.hardware [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1094.841282] env[68040]: DEBUG nova.virt.hardware [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1094.841420] env[68040]: DEBUG nova.virt.hardware [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1094.841539] env[68040]: DEBUG nova.virt.hardware [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 
tempest-ServersTestJSON-1941559996-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1094.841704] env[68040]: DEBUG nova.virt.hardware [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1094.841879] env[68040]: DEBUG nova.virt.hardware [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1094.842831] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26aa5c3b-a891-49ee-a596-6897981905a5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.850630] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6fe5dd-87dc-45f5-bb5d-62db363b8e2f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.090501] env[68040]: DEBUG nova.network.neutron [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Successfully created port: f1a11219-08c6-4f4d-87ab-2e2530cf6cbd {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1095.856050] env[68040]: DEBUG nova.network.neutron [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Successfully updated port: f1a11219-08c6-4f4d-87ab-2e2530cf6cbd {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1095.868805] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "refresh_cache-e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1095.869417] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquired lock "refresh_cache-e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.869417] env[68040]: DEBUG nova.network.neutron [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1095.911237] env[68040]: DEBUG nova.network.neutron [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Instance cache missing network info. 
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1096.000817] env[68040]: DEBUG nova.compute.manager [req-3b34726e-3fdf-4dbf-b232-a698f38c5998 req-b5937302-09ce-4ae2-9465-0fec0c8d8c18 service nova] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Received event network-vif-plugged-f1a11219-08c6-4f4d-87ab-2e2530cf6cbd {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1096.001097] env[68040]: DEBUG oslo_concurrency.lockutils [req-3b34726e-3fdf-4dbf-b232-a698f38c5998 req-b5937302-09ce-4ae2-9465-0fec0c8d8c18 service nova] Acquiring lock "e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1096.001338] env[68040]: DEBUG oslo_concurrency.lockutils [req-3b34726e-3fdf-4dbf-b232-a698f38c5998 req-b5937302-09ce-4ae2-9465-0fec0c8d8c18 service nova] Lock "e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1096.001541] env[68040]: DEBUG oslo_concurrency.lockutils [req-3b34726e-3fdf-4dbf-b232-a698f38c5998 req-b5937302-09ce-4ae2-9465-0fec0c8d8c18 service nova] Lock "e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.001736] env[68040]: DEBUG nova.compute.manager [req-3b34726e-3fdf-4dbf-b232-a698f38c5998 req-b5937302-09ce-4ae2-9465-0fec0c8d8c18 service nova] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] No waiting events found dispatching network-vif-plugged-f1a11219-08c6-4f4d-87ab-2e2530cf6cbd {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1096.001941] env[68040]: WARNING nova.compute.manager [req-3b34726e-3fdf-4dbf-b232-a698f38c5998 req-b5937302-09ce-4ae2-9465-0fec0c8d8c18 service nova] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Received unexpected event network-vif-plugged-f1a11219-08c6-4f4d-87ab-2e2530cf6cbd for instance with vm_state building and task_state spawning. [ 1096.002332] env[68040]: DEBUG nova.compute.manager [req-3b34726e-3fdf-4dbf-b232-a698f38c5998 req-b5937302-09ce-4ae2-9465-0fec0c8d8c18 service nova] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Received event network-changed-f1a11219-08c6-4f4d-87ab-2e2530cf6cbd {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1096.002545] env[68040]: DEBUG nova.compute.manager [req-3b34726e-3fdf-4dbf-b232-a698f38c5998 req-b5937302-09ce-4ae2-9465-0fec0c8d8c18 service nova] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Refreshing instance network info cache due to event network-changed-f1a11219-08c6-4f4d-87ab-2e2530cf6cbd. 
{{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1096.002746] env[68040]: DEBUG oslo_concurrency.lockutils [req-3b34726e-3fdf-4dbf-b232-a698f38c5998 req-b5937302-09ce-4ae2-9465-0fec0c8d8c18 service nova] Acquiring lock "refresh_cache-e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.088745] env[68040]: DEBUG nova.network.neutron [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Updating instance_info_cache with network_info: [{"id": "f1a11219-08c6-4f4d-87ab-2e2530cf6cbd", "address": "fa:16:3e:98:e4:8c", "network": {"id": "9839fe48-68c5-4649-bd83-6b4d9c6008e8", "bridge": "br-int", "label": "tempest-ServersTestJSON-1965746643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dbf578e94a4db7af130703ad4eb741", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1a11219-08", "ovs_interfaceid": "f1a11219-08c6-4f4d-87ab-2e2530cf6cbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.100523] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Releasing lock "refresh_cache-e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1096.100809] env[68040]: DEBUG nova.compute.manager [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Instance network_info: |[{"id": "f1a11219-08c6-4f4d-87ab-2e2530cf6cbd", "address": "fa:16:3e:98:e4:8c", "network": {"id": "9839fe48-68c5-4649-bd83-6b4d9c6008e8", "bridge": "br-int", "label": "tempest-ServersTestJSON-1965746643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dbf578e94a4db7af130703ad4eb741", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1a11219-08", 
"ovs_interfaceid": "f1a11219-08c6-4f4d-87ab-2e2530cf6cbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1096.101112] env[68040]: DEBUG oslo_concurrency.lockutils [req-3b34726e-3fdf-4dbf-b232-a698f38c5998 req-b5937302-09ce-4ae2-9465-0fec0c8d8c18 service nova] Acquired lock "refresh_cache-e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.101295] env[68040]: DEBUG nova.network.neutron [req-3b34726e-3fdf-4dbf-b232-a698f38c5998 req-b5937302-09ce-4ae2-9465-0fec0c8d8c18 service nova] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Refreshing network info cache for port f1a11219-08c6-4f4d-87ab-2e2530cf6cbd {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1096.102331] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:e4:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4b033f4d-2e92-4702-add6-410a29d3f251', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f1a11219-08c6-4f4d-87ab-2e2530cf6cbd', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1096.110617] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Creating folder: Project (52dbf578e94a4db7af130703ad4eb741). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1096.111580] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-633bc71a-5fa9-452e-ae38-31c25c12546f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.124832] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Created folder: Project (52dbf578e94a4db7af130703ad4eb741) in parent group-v639956. [ 1096.124986] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Creating folder: Instances. Parent ref: group-v640013. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1096.125246] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12c99c17-0104-418f-b822-37a4d7c7ed42 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.133934] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Created folder: Instances in parent group-v640013. 
[ 1096.134162] env[68040]: DEBUG oslo.service.loopingcall [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1096.134332] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1096.134762] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-97a62aa6-7d67-49f8-9e8d-5c90007c4922 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.156544] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1096.156544] env[68040]: value = "task-3200233" [ 1096.156544] env[68040]: _type = "Task" [ 1096.156544] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.164400] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200233, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.378699] env[68040]: DEBUG nova.network.neutron [req-3b34726e-3fdf-4dbf-b232-a698f38c5998 req-b5937302-09ce-4ae2-9465-0fec0c8d8c18 service nova] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Updated VIF entry in instance network info cache for port f1a11219-08c6-4f4d-87ab-2e2530cf6cbd. {{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1096.379145] env[68040]: DEBUG nova.network.neutron [req-3b34726e-3fdf-4dbf-b232-a698f38c5998 req-b5937302-09ce-4ae2-9465-0fec0c8d8c18 service nova] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Updating instance_info_cache with network_info: [{"id": "f1a11219-08c6-4f4d-87ab-2e2530cf6cbd", "address": "fa:16:3e:98:e4:8c", "network": {"id": "9839fe48-68c5-4649-bd83-6b4d9c6008e8", "bridge": "br-int", "label": "tempest-ServersTestJSON-1965746643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dbf578e94a4db7af130703ad4eb741", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1a11219-08", "ovs_interfaceid": "f1a11219-08c6-4f4d-87ab-2e2530cf6cbd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.390701] env[68040]: DEBUG oslo_concurrency.lockutils [req-3b34726e-3fdf-4dbf-b232-a698f38c5998 req-b5937302-09ce-4ae2-9465-0fec0c8d8c18 service nova] Releasing lock "refresh_cache-e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd" {{(pid=68040) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1096.666806] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200233, 'name': CreateVM_Task, 'duration_secs': 0.278583} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.666946] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1096.667785] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.667785] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.668172] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1096.668471] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77e4412d-8adc-4028-a8c9-3a53b6892b45 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.673089] env[68040]: DEBUG oslo_vmware.api [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for the task: (returnval){ [ 1096.673089] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52db66a9-2e47-1946-e191-6b98a950083e" [ 1096.673089] env[68040]: _type = "Task" [ 1096.673089] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.681773] env[68040]: DEBUG oslo_vmware.api [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52db66a9-2e47-1946-e191-6b98a950083e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.184126] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1097.184819] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1097.185219] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.611498] env[68040]: DEBUG oslo_concurrency.lockutils [None req-084ca5b5-a407-4835-a64b-021154e15ac9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.166116] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquiring lock "c581d685-7ea0-41f8-b911-ff1dce1b46c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.166116] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "c581d685-7ea0-41f8-b911-ff1dce1b46c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.636308] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ffa334c1-46ca-4321-8ef4-84666042e7cc tempest-ServerMetadataNegativeTestJSON-942408065 tempest-ServerMetadataNegativeTestJSON-942408065-project-member] Acquiring lock "310e3ad1-aa4c-44d1-b1e9-152d1de39125" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.636608] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ffa334c1-46ca-4321-8ef4-84666042e7cc tempest-ServerMetadataNegativeTestJSON-942408065 tempest-ServerMetadataNegativeTestJSON-942408065-project-member] Lock "310e3ad1-aa4c-44d1-b1e9-152d1de39125" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1116.503690] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4fff48aa-07ba-4031-bce1-a66c45599b83 tempest-ServerGroupTestJSON-1986318540 tempest-ServerGroupTestJSON-1986318540-project-member] Acquiring lock "72f5f3ba-c931-40a5-ab73-4e6738e0aaba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1116.503983] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4fff48aa-07ba-4031-bce1-a66c45599b83 tempest-ServerGroupTestJSON-1986318540 tempest-ServerGroupTestJSON-1986318540-project-member] Lock "72f5f3ba-c931-40a5-ab73-4e6738e0aaba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1119.941632] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a0d7a4c2-38ad-452c-b7f1-ca0c8bc2ec0f tempest-ServersTestBootFromVolume-1340526347 tempest-ServersTestBootFromVolume-1340526347-project-member] Acquiring lock "c2f603dd-6a9b-4a0f-b50a-263cf8eb70af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1119.941917] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a0d7a4c2-38ad-452c-b7f1-ca0c8bc2ec0f tempest-ServersTestBootFromVolume-1340526347 tempest-ServersTestBootFromVolume-1340526347-project-member] Lock "c2f603dd-6a9b-4a0f-b50a-263cf8eb70af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1123.800977] env[68040]: DEBUG oslo_concurrency.lockutils [None req-844fd553-d870-4df2-bd91-7f10aa5f2aa3 tempest-InstanceActionsV221TestJSON-1568005960 tempest-InstanceActionsV221TestJSON-1568005960-project-member] Acquiring lock "ec66dda9-4e56-4baa-b8aa-8b01f28d8e9c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1123.801306] env[68040]: DEBUG oslo_concurrency.lockutils [None req-844fd553-d870-4df2-bd91-7f10aa5f2aa3 tempest-InstanceActionsV221TestJSON-1568005960 tempest-InstanceActionsV221TestJSON-1568005960-project-member] Lock "ec66dda9-4e56-4baa-b8aa-8b01f28d8e9c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1127.666658] env[68040]: DEBUG oslo_concurrency.lockutils [None req-5cf588d9-4f1f-4813-9b5c-123759522daa tempest-ServersTestMultiNic-1524601141 tempest-ServersTestMultiNic-1524601141-project-member] Acquiring lock "8535d103-7bdf-4210-aa1e-180bb100de5f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1127.666953] env[68040]: DEBUG oslo_concurrency.lockutils [None req-5cf588d9-4f1f-4813-9b5c-123759522daa 
tempest-ServersTestMultiNic-1524601141 tempest-ServersTestMultiNic-1524601141-project-member] Lock "8535d103-7bdf-4210-aa1e-180bb100de5f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.005657] env[68040]: WARNING oslo_vmware.rw_handles [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1143.005657] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1143.005657] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1143.005657] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1143.005657] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1143.005657] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 1143.005657] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1143.005657] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1143.005657] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1143.005657] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1143.005657] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1143.005657] env[68040]: ERROR oslo_vmware.rw_handles [ 1143.006428] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/4c34450d-522f-4092-bfe4-317988d2df8c/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1143.008201] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1143.008455] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Copying Virtual Disk [datastore2] vmware_temp/4c34450d-522f-4092-bfe4-317988d2df8c/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/4c34450d-522f-4092-bfe4-317988d2df8c/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1143.008769] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1114eb6c-fa6d-490d-969d-41dd8ad4a3f3 {{(pid=68040) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1143.019413] env[68040]: DEBUG oslo_vmware.api [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Waiting for the task: (returnval){
[ 1143.019413] env[68040]: value = "task-3200244"
[ 1143.019413] env[68040]: _type = "Task"
[ 1143.019413] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1143.028396] env[68040]: DEBUG oslo_vmware.api [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Task: {'id': task-3200244, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1143.530934] env[68040]: DEBUG oslo_vmware.exceptions [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
[ 1143.531312] env[68040]: DEBUG oslo_concurrency.lockutils [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1143.531903] env[68040]: ERROR nova.compute.manager [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1143.531903] env[68040]: Faults: ['InvalidArgument']
[ 1143.531903] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Traceback (most recent call last):
[ 1143.531903] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1143.531903] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     yield resources
[ 1143.531903] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1143.531903] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     self.driver.spawn(context, instance, image_meta,
[ 1143.531903] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1143.531903] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1143.531903] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1143.531903] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     self._fetch_image_if_missing(context, vi)
[ 1143.531903] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1143.532291] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     image_cache(vi, tmp_image_ds_loc)
[ 1143.532291] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1143.532291] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     vm_util.copy_virtual_disk(
[ 1143.532291] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1143.532291] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     session._wait_for_task(vmdk_copy_task)
[ 1143.532291] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1143.532291] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     return self.wait_for_task(task_ref)
[ 1143.532291] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1143.532291] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     return evt.wait()
[ 1143.532291] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1143.532291] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     result = hub.switch()
[ 1143.532291] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1143.532291] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     return self.greenlet.switch()
[ 1143.532628] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1143.532628] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     self.f(*self.args, **self.kw)
[ 1143.532628] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1143.532628] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     raise exceptions.translate_fault(task_info.error)
[ 1143.532628] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1143.532628] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Faults: ['InvalidArgument']
[ 1143.532628] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]
[ 1143.532628] env[68040]: INFO nova.compute.manager [None req-60eba638-26b9-4d35-864b-3b0dc752cd03
tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Terminating instance [ 1143.534244] env[68040]: DEBUG oslo_concurrency.lockutils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.534814] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1143.535108] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ed67249-991c-4aa8-8d3c-24f7bc6ba82f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.537663] env[68040]: DEBUG nova.compute.manager [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1143.538067] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1143.538856] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7498b3a1-9b7e-4b46-a3d2-33345c84ca17 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.546577] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1143.547251] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8579ae03-fb31-44a3-98ed-77922d5787f8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.549602] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1143.549841] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Folder [datastore2] devstack-image-cache_base created. 
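The mkdir pair above ("Creating directory with path ..." / "Created directory with path ...") is an idempotent create: the driver issues MakeDirectory unconditionally and treats an already-existing image-cache folder as success. A minimal sketch of that pattern, using a stand-in session helper and fault class rather than the actual nova.virt.vmwareapi API:

```python
class FileAlreadyExistsException(Exception):
    """Stand-in for the fault vCenter raises when the path exists."""


def create_folder_if_missing(session, datastore, path):
    # Issue the create unconditionally; checking first would race with
    # the other request-ids working in the same shared image cache.
    try:
        session.make_directory("[%s] %s" % (datastore, path),
                               create_parents=True)  # assumed helper
        print("Folder [%s] %s created." % (datastore, path))
    except FileAlreadyExistsException:
        # Another request created it first; fine for a shared cache dir.
        pass
```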
{{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1143.550921] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f3d78bf-baab-45b2-887f-4937a2ca96d5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.556149] env[68040]: DEBUG oslo_vmware.api [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Waiting for the task: (returnval){ [ 1143.556149] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52d959ba-64e8-fbda-840f-c0c97b6b8de8" [ 1143.556149] env[68040]: _type = "Task" [ 1143.556149] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.564013] env[68040]: DEBUG oslo_vmware.api [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52d959ba-64e8-fbda-840f-c0c97b6b8de8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.617144] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1143.617453] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1143.617749] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Deleting the datastore file [datastore2] 39de4e78-44cd-4582-998e-88ce6de2d51c {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1143.618242] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-95e488a4-783e-43c1-9c47-a8a5e8906fc2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.626903] env[68040]: DEBUG oslo_vmware.api [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Waiting for the task: (returnval){ [ 1143.626903] env[68040]: value = "task-3200246" [ 1143.626903] env[68040]: _type = "Task" [ 1143.626903] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.635369] env[68040]: DEBUG oslo_vmware.api [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Task: {'id': task-3200246, 'name': DeleteDatastoreFile_Task} progress is 0%. 
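Both task waits above follow the same loop: poll vCenter for the task state on a fixed interval, log progress, return on success, and raise a translated fault on error (the `raise exceptions.translate_fault(task_info.error)` frame in the traceback earlier is exactly that exit path). A rough sketch of the pattern with illustrative names, not oslo.vmware's real classes:

```python
import time

POLL_INTERVAL = 0.5  # seconds between polls; illustrative value


class TaskFailed(Exception):
    """Stand-in for the translated VimFaultException."""


def wait_for_task(poll_fn):
    """Poll poll_fn() until the task reaches a terminal state.

    poll_fn returns an object with .state in {'running', 'success',
    'error'}, .progress, and .error (assumed shape).
    """
    while True:
        task = poll_fn()
        if task.state == "success":
            return task
        if task.state == "error":
            # This is where the vSphere fault gets surfaced to the
            # caller, as in the CopyVirtualDisk_Task failure above.
            raise TaskFailed(task.error)
        print("progress is %s%%" % task.progress)
        time.sleep(POLL_INTERVAL)
```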
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.984618] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1144.006368] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.007216] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.007216] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1144.007216] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1144.008954] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51fbb8f2-f4d9-4261-9b86-8e991f3359ac {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.019720] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc108639-1dd7-4ab9-ba2d-068083e0644a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.037271] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d272888-7d12-4e78-8771-1566405422bb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.042598] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24050682-bb6c-4ee0-9f32-f05ca4cd06ed {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.082430] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180949MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1144.082593] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.082793] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.093379] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1144.093630] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Creating directory with path [datastore2] vmware_temp/25d306b9-3008-4258-bb2b-1249d59d78ea/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1144.093864] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad0e1aa8-f442-49de-afd9-0f3961e75b6b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.106426] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Created directory with path [datastore2] vmware_temp/25d306b9-3008-4258-bb2b-1249d59d78ea/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1144.106625] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Fetch image to [datastore2] vmware_temp/25d306b9-3008-4258-bb2b-1249d59d78ea/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1144.106799] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/25d306b9-3008-4258-bb2b-1249d59d78ea/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1144.107583] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b239000-f5b9-48c4-826e-d346477ca058 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.115678] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e810c5a0-759c-40ab-a28d-442c3f464b9c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.125209] env[68040]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb85f1d-5e09-400c-9f3f-f079bd075fd9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.166641] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c15f63-186a-46ee-ae8a-4b6afbef32b3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.174588] env[68040]: DEBUG oslo_vmware.api [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Task: {'id': task-3200246, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082371} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.174778] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1144.175027] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1144.175210] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1144.175386] env[68040]: INFO nova.compute.manager [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Took 0.64 seconds to destroy the instance on the hypervisor. [ 1144.179916] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-81ee9072-a1f0-48fa-b5e2-a593db42d21c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.181842] env[68040]: DEBUG nova.compute.claims [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1144.182045] env[68040]: DEBUG oslo_concurrency.lockutils [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1144.206223] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 39de4e78-44cd-4582-998e-88ce6de2d51c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
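The lockutils records threaded through this trace (Acquiring / acquired ... waited Ns / "released" ... held Ns) come from a wrapper that times how long each caller waited for a lock and how long it then held it. A minimal re-implementation of that reporting, not the oslo_concurrency code itself:

```python
import threading
import time
from contextlib import contextmanager


@contextmanager
def timed_lock(lock, name, owner):
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, waited))
    acquired = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - acquired
        print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, owner, held))


# Usage, mirroring the records above (names illustrative):
# rt_lock = threading.Lock()
# with timed_lock(rt_lock, "compute_resources", "abort_instance_claim"):
#     ...abort the claim under the lock...
```

The long "waited" times in this trace (e.g. the claim abort waiting 0.771s below) are contention on exactly such a shared lock, not slow work inside the critical section.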
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1144.206378] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance a89ff564-ea35-4000-8efa-2c1ec2b61759 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1144.207165] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 42f39352-e703-4ebf-9559-4c8b5abca70e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1144.207165] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d1819f29-a891-47dd-a456-8f3b127daf6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1144.207165] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1812f13e-b03d-48d4-940a-43974784265b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1144.207165] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance de1b8ef9-0088-4d2a-985e-d04fcff55d31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1144.207340] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance bce68a2b-260c-45cc-ac98-d4b01b4513a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1144.207340] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1e43f6be-f6a3-4569-adea-c82a5d709247 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1144.207340] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3738de32-79cd-4b04-8081-cc1146730c75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1144.207340] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1144.212460] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1144.230402] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1144.243601] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e92b662c-b458-49d8-ac2a-00ae6046a11b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1144.258608] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 57cd94c2-aec3-427e-9b9f-a444fe291974 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1144.269475] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 87a7851e-d6fe-481a-8abb-5732e281cb64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1144.282874] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance bd14d08b-d71a-43b0-b72a-6504dc0e2142 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1144.289383] env[68040]: DEBUG oslo_vmware.rw_handles [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/25d306b9-3008-4258-bb2b-1249d59d78ea/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1144.347816] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 00305041-c0c0-4b7b-9149-9bcba4392279 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1144.352934] env[68040]: DEBUG oslo_vmware.rw_handles [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1144.352934] env[68040]: DEBUG oslo_vmware.rw_handles [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/25d306b9-3008-4258-bb2b-1249d59d78ea/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1144.362046] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 6011cb9b-e882-4eb2-96b4-82a43585acbc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1144.373247] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 81cfab4f-6a32-42b0-bbfc-45596bc9ad4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1144.384308] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f6edf79d-5eff-4e2c-94d2-aa5cf1731748 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
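The rw_handles records above stream the 21,318,656-byte image to the datastore's HTTPS file endpoint (/folder/<path>?dcPath=...&dsName=...). In essence that write handle is an authenticated HTTP PUT of raw bytes; the sketch below shows the shape of such an upload with the requests library, and the ticket cookie is an assumption about the auth mechanism rather than something taken from this log:

```python
import requests


def upload_to_datastore(url, fileobj, size, ticket):
    """PUT image bytes to a vSphere datastore file URL (illustrative)."""
    headers = {
        "Content-Type": "application/octet-stream",
        "Content-Length": str(size),
        # Assumed auth: a generic service ticket passed as a cookie; the
        # AcquireGenericServiceTicket call earlier obtains one.
        "Cookie": "vmware_cgi_ticket=%s" % ticket,
    }
    # requests streams a file-like object in chunks instead of buffering
    # the whole 21 MB body in memory.
    resp = requests.put(url, data=fileobj, headers=headers, verify=True)
    resp.raise_for_status()
```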
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1144.395087] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c581d685-7ea0-41f8-b911-ff1dce1b46c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1144.407662] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 310e3ad1-aa4c-44d1-b1e9-152d1de39125 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1144.419022] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 72f5f3ba-c931-40a5-ab73-4e6738e0aaba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1144.431194] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c2f603dd-6a9b-4a0f-b50a-263cf8eb70af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1144.443287] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance ec66dda9-4e56-4baa-b8aa-8b01f28d8e9c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1144.460375] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8535d103-7bdf-4210-aa1e-180bb100de5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1144.460628] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1144.460775] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1144.855317] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1280e4-c3b1-4303-af0a-1a47fbb163bd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.865607] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83efcc2c-b588-46d8-89b6-419fed8e954a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.896048] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-888afc7f-9770-4585-9700-bbcd4adad3de {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.905574] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5398a6bd-9892-4727-93bb-842d7a17dc7d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.923178] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1144.933470] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1144.952824] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1144.953135] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.870s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1144.953325] env[68040]: DEBUG oslo_concurrency.lockutils [None 
req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.771s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1145.403746] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ce0eb1d-008d-42c3-acc1-babc2a130d79 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1145.413542] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b4c568-9874-4dbf-8ab2-623114d3a067 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1145.445741] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d0888f-8a8b-4ae1-bc94-4d4edb63a00e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1145.455063] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0752da80-4bcc-4235-9722-b4668e7b89c1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1145.469569] env[68040]: DEBUG nova.compute.provider_tree [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1145.481808] env[68040]: DEBUG nova.scheduler.client.report [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1145.501717] env[68040]: DEBUG oslo_concurrency.lockutils [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.547s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1145.501717] env[68040]: ERROR nova.compute.manager [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1145.501717] env[68040]: Faults: ['InvalidArgument']
[ 1145.501717] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Traceback (most recent call last):
[ 1145.501717] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1145.501717] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     self.driver.spawn(context, instance, image_meta,
[ 1145.501717] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1145.501717] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1145.501717] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1145.501717] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     self._fetch_image_if_missing(context, vi)
[ 1145.502137] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1145.502137] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     image_cache(vi, tmp_image_ds_loc)
[ 1145.502137] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1145.502137] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     vm_util.copy_virtual_disk(
[ 1145.502137] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1145.502137] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     session._wait_for_task(vmdk_copy_task)
[ 1145.502137] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1145.502137] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     return self.wait_for_task(task_ref)
[ 1145.502137] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1145.502137] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     return evt.wait()
[ 1145.502137] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1145.502137] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     result = hub.switch()
[ 1145.502137] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1145.502449] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     return self.greenlet.switch()
[ 1145.502449] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1145.502449] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     self.f(*self.args, **self.kw)
[ 1145.502449] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1145.502449] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]     raise exceptions.translate_fault(task_info.error)
[ 1145.502449] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1145.502449] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Faults: ['InvalidArgument']
[ 1145.502449] env[68040]: ERROR nova.compute.manager [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c]
[ 1145.502449] env[68040]: DEBUG nova.compute.utils [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1145.503878] env[68040]: DEBUG nova.compute.manager [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Build of instance 39de4e78-44cd-4582-998e-88ce6de2d51c was re-scheduled: A specified parameter was not correct: fileType
[ 1145.503878] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1145.504272] env[68040]: DEBUG nova.compute.manager [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1145.504574] env[68040]: DEBUG nova.compute.manager [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged.
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1145.504738] env[68040]: DEBUG nova.compute.manager [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1145.504909] env[68040]: DEBUG nova.network.neutron [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1145.956564] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.983279] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.987021] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1146.060582] env[68040]: DEBUG nova.network.neutron [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.072629] env[68040]: INFO nova.compute.manager [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Took 0.57 seconds to deallocate network for instance. 
[ 1146.193478] env[68040]: INFO nova.scheduler.client.report [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Deleted allocations for instance 39de4e78-44cd-4582-998e-88ce6de2d51c [ 1146.217153] env[68040]: DEBUG oslo_concurrency.lockutils [None req-60eba638-26b9-4d35-864b-3b0dc752cd03 tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Lock "39de4e78-44cd-4582-998e-88ce6de2d51c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 482.267s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.218352] env[68040]: DEBUG oslo_concurrency.lockutils [None req-19e2ad11-bcf8-4b05-9b63-5a427215fc5c tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Lock "39de4e78-44cd-4582-998e-88ce6de2d51c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 284.932s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.218571] env[68040]: DEBUG oslo_concurrency.lockutils [None req-19e2ad11-bcf8-4b05-9b63-5a427215fc5c tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Acquiring lock "39de4e78-44cd-4582-998e-88ce6de2d51c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.218801] env[68040]: DEBUG oslo_concurrency.lockutils [None req-19e2ad11-bcf8-4b05-9b63-5a427215fc5c tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Lock "39de4e78-44cd-4582-998e-88ce6de2d51c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.218986] env[68040]: DEBUG oslo_concurrency.lockutils [None req-19e2ad11-bcf8-4b05-9b63-5a427215fc5c tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Lock "39de4e78-44cd-4582-998e-88ce6de2d51c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.222908] env[68040]: INFO nova.compute.manager [None req-19e2ad11-bcf8-4b05-9b63-5a427215fc5c tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Terminating instance [ 1146.225671] env[68040]: DEBUG nova.compute.manager [None req-19e2ad11-bcf8-4b05-9b63-5a427215fc5c tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Start destroying the instance on the hypervisor. 
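The "Deleted allocations for instance" record above corresponds to one Placement API call that drops every allocation held by the instance's consumer UUID, so the failed build stops counting against this compute node's inventory. A sketch of that REST call (endpoint, token, and microversion handling are assumed, not taken from this log):

```python
import requests


def delete_allocations(placement_url, consumer_uuid, token):
    # DELETE /allocations/{consumer_uuid} removes the consumer's
    # allocations against all resource providers in one shot.
    resp = requests.delete(
        "%s/allocations/%s" % (placement_url, consumer_uuid),
        headers={
            "X-Auth-Token": token,
            "OpenStack-API-Version": "placement 1.28",  # assumed version
        },
    )
    # 204 on success; 404 simply means the consumer held nothing.
    if resp.status_code not in (204, 404):
        resp.raise_for_status()
    return resp.status_code
```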
{{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1146.226367] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-19e2ad11-bcf8-4b05-9b63-5a427215fc5c tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1146.226367] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dbe417c2-4e5c-4973-8932-22e67fd8804d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.237037] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b0e218d-ebb5-48e2-a10c-c2c8dd3ea9e5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.250418] env[68040]: DEBUG nova.compute.manager [None req-40811468-782c-4f4a-bd9d-fd0224b67b79 tempest-MultipleCreateTestJSON-225064095 tempest-MultipleCreateTestJSON-225064095-project-member] [instance: 3f9438b9-bfe3-4a7f-bfec-e140fed5f66c] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1146.274798] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-19e2ad11-bcf8-4b05-9b63-5a427215fc5c tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 39de4e78-44cd-4582-998e-88ce6de2d51c could not be found. [ 1146.275116] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-19e2ad11-bcf8-4b05-9b63-5a427215fc5c tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1146.275369] env[68040]: INFO nova.compute.manager [None req-19e2ad11-bcf8-4b05-9b63-5a427215fc5c tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1146.275628] env[68040]: DEBUG oslo.service.loopingcall [None req-19e2ad11-bcf8-4b05-9b63-5a427215fc5c tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1146.275870] env[68040]: DEBUG nova.compute.manager [-] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1146.275965] env[68040]: DEBUG nova.network.neutron [-] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1146.288861] env[68040]: DEBUG nova.compute.manager [None req-40811468-782c-4f4a-bd9d-fd0224b67b79 tempest-MultipleCreateTestJSON-225064095 tempest-MultipleCreateTestJSON-225064095-project-member] [instance: 3f9438b9-bfe3-4a7f-bfec-e140fed5f66c] Instance disappeared before build. 
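The WARNING above shows the explicit terminate finding no VM on the backend (the failed spawn already unregistered it) and still reporting the destroy as successful, which is what makes terminate idempotent: cleanup of networking and allocations proceeds either way. Sketched with stand-in helpers rather than the real vmops API:

```python
class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""


def destroy(vm_api, instance_uuid):
    """Best-effort destroy: a missing backend VM counts as success."""
    try:
        ref = vm_api.find_by_uuid(instance_uuid)  # assumed helper
        vm_api.unregister(ref)                    # assumed helper
        vm_api.delete_files(ref)                  # assumed helper
    except InstanceNotFound:
        # Nothing on the backend: warn and continue so the caller can
        # still deallocate networks and delete placement allocations.
        print("Instance %s does not exist on backend; "
              "treating destroy as complete." % instance_uuid)
```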
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1146.311168] env[68040]: DEBUG nova.network.neutron [-] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.318363] env[68040]: DEBUG oslo_concurrency.lockutils [None req-40811468-782c-4f4a-bd9d-fd0224b67b79 tempest-MultipleCreateTestJSON-225064095 tempest-MultipleCreateTestJSON-225064095-project-member] Lock "3f9438b9-bfe3-4a7f-bfec-e140fed5f66c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 237.494s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.324022] env[68040]: INFO nova.compute.manager [-] [instance: 39de4e78-44cd-4582-998e-88ce6de2d51c] Took 0.05 seconds to deallocate network for instance. [ 1146.331031] env[68040]: DEBUG nova.compute.manager [None req-40811468-782c-4f4a-bd9d-fd0224b67b79 tempest-MultipleCreateTestJSON-225064095 tempest-MultipleCreateTestJSON-225064095-project-member] [instance: e2cd4cbd-279b-4852-85b0-f78af45bf7e2] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1146.366947] env[68040]: DEBUG nova.compute.manager [None req-40811468-782c-4f4a-bd9d-fd0224b67b79 tempest-MultipleCreateTestJSON-225064095 tempest-MultipleCreateTestJSON-225064095-project-member] [instance: e2cd4cbd-279b-4852-85b0-f78af45bf7e2] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1146.389248] env[68040]: DEBUG oslo_concurrency.lockutils [None req-40811468-782c-4f4a-bd9d-fd0224b67b79 tempest-MultipleCreateTestJSON-225064095 tempest-MultipleCreateTestJSON-225064095-project-member] Lock "e2cd4cbd-279b-4852-85b0-f78af45bf7e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 237.523s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.401022] env[68040]: DEBUG nova.compute.manager [None req-db81460a-d299-4a77-a426-bf8dc826c72c tempest-ServerActionsTestJSON-706247280 tempest-ServerActionsTestJSON-706247280-project-member] [instance: 8ec0f6bd-4a3e-4e70-b310-714676607b9c] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1146.432544] env[68040]: DEBUG nova.compute.manager [None req-db81460a-d299-4a77-a426-bf8dc826c72c tempest-ServerActionsTestJSON-706247280 tempest-ServerActionsTestJSON-706247280-project-member] [instance: 8ec0f6bd-4a3e-4e70-b310-714676607b9c] Instance disappeared before build. 
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1146.447692] env[68040]: DEBUG oslo_concurrency.lockutils [None req-19e2ad11-bcf8-4b05-9b63-5a427215fc5c tempest-ServerDiagnosticsTest-692886689 tempest-ServerDiagnosticsTest-692886689-project-member] Lock "39de4e78-44cd-4582-998e-88ce6de2d51c" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.229s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.462213] env[68040]: DEBUG oslo_concurrency.lockutils [None req-db81460a-d299-4a77-a426-bf8dc826c72c tempest-ServerActionsTestJSON-706247280 tempest-ServerActionsTestJSON-706247280-project-member] Lock "8ec0f6bd-4a3e-4e70-b310-714676607b9c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 236.915s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.472309] env[68040]: DEBUG nova.compute.manager [None req-010535a2-5c2c-4124-bd7d-0ad1b27a34e1 tempest-ServerMetadataTestJSON-1008625601 tempest-ServerMetadataTestJSON-1008625601-project-member] [instance: 25298be1-8cc9-46fa-9b33-62425bcb91dc] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1146.503042] env[68040]: DEBUG nova.compute.manager [None req-010535a2-5c2c-4124-bd7d-0ad1b27a34e1 tempest-ServerMetadataTestJSON-1008625601 tempest-ServerMetadataTestJSON-1008625601-project-member] [instance: 25298be1-8cc9-46fa-9b33-62425bcb91dc] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1146.525564] env[68040]: DEBUG oslo_concurrency.lockutils [None req-010535a2-5c2c-4124-bd7d-0ad1b27a34e1 tempest-ServerMetadataTestJSON-1008625601 tempest-ServerMetadataTestJSON-1008625601-project-member] Lock "25298be1-8cc9-46fa-9b33-62425bcb91dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 235.401s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.541887] env[68040]: DEBUG nova.compute.manager [None req-1c2fbfe0-bfd1-43fe-b06b-7f3fa185a788 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 67756ba9-5738-4669-ace9-a3d2f1952dfa] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1146.566847] env[68040]: DEBUG nova.compute.manager [None req-1c2fbfe0-bfd1-43fe-b06b-7f3fa185a788 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 67756ba9-5738-4669-ace9-a3d2f1952dfa] Instance disappeared before build.
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1146.575127] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3059098d-38cc-47b4-a221-522a20c890e9 tempest-InstanceActionsNegativeTestJSON-1151606230 tempest-InstanceActionsNegativeTestJSON-1151606230-project-member] Acquiring lock "462c8f10-1dda-4687-946c-fb40c3e4f049" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.575353] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3059098d-38cc-47b4-a221-522a20c890e9 tempest-InstanceActionsNegativeTestJSON-1151606230 tempest-InstanceActionsNegativeTestJSON-1151606230-project-member] Lock "462c8f10-1dda-4687-946c-fb40c3e4f049" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.589716] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1c2fbfe0-bfd1-43fe-b06b-7f3fa185a788 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "67756ba9-5738-4669-ace9-a3d2f1952dfa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 232.831s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.602333] env[68040]: DEBUG nova.compute.manager [None req-f759feb5-2012-4bf9-8b20-3541746c3a9a tempest-SecurityGroupsTestJSON-810175009 tempest-SecurityGroupsTestJSON-810175009-project-member] [instance: 940a6a43-d74e-419c-af5b-92c991e3649d] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1146.627496] env[68040]: DEBUG nova.compute.manager [None req-f759feb5-2012-4bf9-8b20-3541746c3a9a tempest-SecurityGroupsTestJSON-810175009 tempest-SecurityGroupsTestJSON-810175009-project-member] [instance: 940a6a43-d74e-419c-af5b-92c991e3649d] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1146.655182] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f759feb5-2012-4bf9-8b20-3541746c3a9a tempest-SecurityGroupsTestJSON-810175009 tempest-SecurityGroupsTestJSON-810175009-project-member] Lock "940a6a43-d74e-419c-af5b-92c991e3649d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 228.397s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.667469] env[68040]: DEBUG nova.compute.manager [None req-4857b5a4-1338-4d93-af7c-84ff79ed1756 tempest-ServersTestMultiNic-1524601141 tempest-ServersTestMultiNic-1524601141-project-member] [instance: 1b4b422a-f096-4ed1-9d47-f150e7a3434f] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1146.697064] env[68040]: DEBUG nova.compute.manager [None req-4857b5a4-1338-4d93-af7c-84ff79ed1756 tempest-ServersTestMultiNic-1524601141 tempest-ServersTestMultiNic-1524601141-project-member] [instance: 1b4b422a-f096-4ed1-9d47-f150e7a3434f] Instance disappeared before build.
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1146.727727] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4857b5a4-1338-4d93-af7c-84ff79ed1756 tempest-ServersTestMultiNic-1524601141 tempest-ServersTestMultiNic-1524601141-project-member] Lock "1b4b422a-f096-4ed1-9d47-f150e7a3434f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 218.452s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.751278] env[68040]: DEBUG nova.compute.manager [None req-675ea968-f2bd-4ccc-9ae9-19b4a4037098 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 3e7cb203-0bad-49d0-83d4-b5a086c31ad6] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1146.778537] env[68040]: DEBUG nova.compute.manager [None req-675ea968-f2bd-4ccc-9ae9-19b4a4037098 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] [instance: 3e7cb203-0bad-49d0-83d4-b5a086c31ad6] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1146.801299] env[68040]: DEBUG oslo_concurrency.lockutils [None req-675ea968-f2bd-4ccc-9ae9-19b4a4037098 tempest-MigrationsAdminTest-1851392124 tempest-MigrationsAdminTest-1851392124-project-member] Lock "3e7cb203-0bad-49d0-83d4-b5a086c31ad6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 201.321s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1146.811930] env[68040]: DEBUG nova.compute.manager [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Starting instance...
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1146.869552] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.870225] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.871408] env[68040]: INFO nova.compute.claims [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1146.983766] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1147.241082] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa08fdca-2bef-40e3-a2ed-38b1ea13bb99 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.249550] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67929de-d603-446e-a191-4dd9fe55bd09 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.281307] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ea46152-5d16-4408-8461-7f19654e2f2a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.289624] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98dfa086-98e1-4e9b-aba9-221c74d26ed4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.307256] env[68040]: DEBUG nova.compute.provider_tree [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1147.313506] env[68040]: DEBUG nova.scheduler.client.report [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1147.348217] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.478s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1147.348728] env[68040]: DEBUG nova.compute.manager [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1147.388420] env[68040]: DEBUG nova.compute.utils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1147.389026] env[68040]: DEBUG nova.compute.manager [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1147.389690] env[68040]: DEBUG nova.network.neutron [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1147.398076] env[68040]: DEBUG nova.compute.manager [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1147.448549] env[68040]: DEBUG nova.policy [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e8612952369b4df29a13c175e24da375', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '482f4f7b22b247e4b5c680860e0657f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 1147.469358] env[68040]: DEBUG nova.compute.manager [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Start spawning the instance on the hypervisor. 
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1147.497512] env[68040]: DEBUG nova.virt.hardware [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=<?>,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-27T05:59:34Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1147.497820] env[68040]: DEBUG nova.virt.hardware [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1147.497996] env[68040]: DEBUG nova.virt.hardware [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1147.498202] env[68040]: DEBUG nova.virt.hardware [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1147.498355] env[68040]: DEBUG nova.virt.hardware [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1147.498506] env[68040]: DEBUG nova.virt.hardware [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1147.498723] env[68040]: DEBUG nova.virt.hardware [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1147.498920] env[68040]: DEBUG nova.virt.hardware [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1147.499631] env[68040]: DEBUG nova.virt.hardware [None
req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1147.502581] env[68040]: DEBUG nova.virt.hardware [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1147.502581] env[68040]: DEBUG nova.virt.hardware [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1147.502581] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5828cdf-06a7-46c1-893f-df732544433a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.513029] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5d07ea2-fa8f-4090-aa6e-d3e73d46cf5a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.803053] env[68040]: DEBUG nova.network.neutron [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Successfully created port: 4aa30166-b7b1-4267-933f-623ca6f467b1 {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1147.983658] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1147.983829] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1147.983943] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1148.007571] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1148.007777] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1148.007903] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Skipping network cache update for instance because it is Building. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1148.008043] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1148.008177] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1148.008298] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1148.008416] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1148.008533] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1148.008650] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1148.008858] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1148.009193] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1148.009428] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1148.353046] env[68040]: DEBUG nova.compute.manager [req-22006231-e2ec-4e4e-8809-8c455f59a93c req-40b02596-4b64-481c-a932-c0435d633508 service nova] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Received event network-vif-plugged-4aa30166-b7b1-4267-933f-623ca6f467b1 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1148.353274] env[68040]: DEBUG oslo_concurrency.lockutils [req-22006231-e2ec-4e4e-8809-8c455f59a93c req-40b02596-4b64-481c-a932-c0435d633508 service nova] Acquiring lock "b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1148.353483] env[68040]: DEBUG oslo_concurrency.lockutils [req-22006231-e2ec-4e4e-8809-8c455f59a93c req-40b02596-4b64-481c-a932-c0435d633508 service nova] Lock "b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1148.353650] env[68040]: DEBUG oslo_concurrency.lockutils [req-22006231-e2ec-4e4e-8809-8c455f59a93c req-40b02596-4b64-481c-a932-c0435d633508 service nova] Lock "b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1148.353816] env[68040]: DEBUG nova.compute.manager [req-22006231-e2ec-4e4e-8809-8c455f59a93c req-40b02596-4b64-481c-a932-c0435d633508 service nova] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] No waiting events found dispatching network-vif-plugged-4aa30166-b7b1-4267-933f-623ca6f467b1 {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1148.353979] env[68040]: WARNING nova.compute.manager [req-22006231-e2ec-4e4e-8809-8c455f59a93c req-40b02596-4b64-481c-a932-c0435d633508 service nova] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Received unexpected event network-vif-plugged-4aa30166-b7b1-4267-933f-623ca6f467b1 for instance with vm_state building and task_state spawning.
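[editor's note] The network-vif-plugged sequence above shows Nova's external-event latch in action: a waiter registers interest in an (instance, event) pair before triggering the work that produces it, the Neutron notification path pops the matching latch under the instance's "-events" lock, and an event nobody registered for is logged as "Received unexpected event" (here because nothing was waiting for the plug event during this spawn). The following is a minimal, hypothetical sketch of that latch pattern using plain threading primitives; the method names mirror the log, but this is not Nova's actual InstanceEvents implementation (which is eventlet-based and tracks lists of pending events per instance).

    import threading

    class InstanceEventLatches:
        """Simplified latch registry keyed by (instance_uuid, event_name)."""

        def __init__(self):
            self._lock = threading.Lock()   # plays the role of the "-events" lock
            self._latches = {}              # (instance_uuid, event_name) -> Event

        def prepare_for_event(self, instance_uuid, event_name):
            # Called by the waiter *before* starting the work that will
            # eventually produce the event (e.g. plugging a VIF).
            with self._lock:
                return self._latches.setdefault(
                    (instance_uuid, event_name), threading.Event())

        def pop_instance_event(self, instance_uuid, event_name):
            # Called from the notification path; removes and returns the latch.
            with self._lock:
                return self._latches.pop((instance_uuid, event_name), None)

    def external_instance_event(latches, instance_uuid, event_name):
        latch = latches.pop_instance_event(instance_uuid, event_name)
        if latch is None:
            # Corresponds to the "Received unexpected event ..." WARNING above.
            print("WARNING: unexpected event %s for instance %s"
                  % (event_name, instance_uuid))
        else:
            latch.set()

    if __name__ == "__main__":
        latches = InstanceEventLatches()
        uuid = "b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77"
        # No waiter registered: dispatched as unexpected, as logged at 1148.353979.
        external_instance_event(latches, uuid, "network-vif-plugged")
        # With a latch registered first, the same notification releases a waiter.
        latch = latches.prepare_for_event(uuid, "network-vif-plugged")
        external_instance_event(latches, uuid, "network-vif-plugged")
        assert latch.wait(timeout=1.0)
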
[ 1148.438637] env[68040]: DEBUG nova.network.neutron [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Successfully updated port: 4aa30166-b7b1-4267-933f-623ca6f467b1 {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1148.453639] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Acquiring lock "refresh_cache-b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1148.453857] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Acquired lock "refresh_cache-b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.453946] env[68040]: DEBUG nova.network.neutron [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1148.498614] env[68040]: DEBUG nova.network.neutron [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Instance cache missing network info. 
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1148.723598] env[68040]: DEBUG nova.network.neutron [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Updating instance_info_cache with network_info: [{"id": "4aa30166-b7b1-4267-933f-623ca6f467b1", "address": "fa:16:3e:66:80:e9", "network": {"id": "6a93af2c-c022-43ab-933a-60ea805831b0", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-231749002-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "482f4f7b22b247e4b5c680860e0657f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4aa30166-b7", "ovs_interfaceid": "4aa30166-b7b1-4267-933f-623ca6f467b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.739243] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Releasing lock "refresh_cache-b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1148.739243] env[68040]: DEBUG nova.compute.manager [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Instance network_info: |[{"id": "4aa30166-b7b1-4267-933f-623ca6f467b1", "address": "fa:16:3e:66:80:e9", "network": {"id": "6a93af2c-c022-43ab-933a-60ea805831b0", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-231749002-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "482f4f7b22b247e4b5c680860e0657f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4aa30166-b7", "ovs_interfaceid": "4aa30166-b7b1-4267-933f-623ca6f467b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1148.739405] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:80:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f01bbee7-8b9a-46be-891e-59d8142fb359', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4aa30166-b7b1-4267-933f-623ca6f467b1', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1148.749906] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Creating folder: Project (482f4f7b22b247e4b5c680860e0657f8). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1148.750510] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51523815-e1d9-46bf-8168-461cd5263662 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.765174] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Created folder: Project (482f4f7b22b247e4b5c680860e0657f8) in parent group-v639956. [ 1148.765444] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Creating folder: Instances. Parent ref: group-v640020. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1148.765897] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1204b118-fdb8-4326-8850-c73de3a7baa5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.777029] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Created folder: Instances in parent group-v640020. [ 1148.777176] env[68040]: DEBUG oslo.service.loopingcall [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1148.777371] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1148.777578] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-893b4658-2295-4759-b38b-76844c41badd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.798876] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1148.798876] env[68040]: value = "task-3200249" [ 1148.798876] env[68040]: _type = "Task" [ 1148.798876] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.807410] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200249, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.005315] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1149.313270] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200249, 'name': CreateVM_Task, 'duration_secs': 0.296104} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.313270] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1149.313270] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1149.313270] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.313270] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1149.313448] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4767643d-b41e-42b2-b325-1106af2d7b17 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.319132] env[68040]: DEBUG oslo_vmware.api [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Waiting for the task: (returnval){ [ 1149.319132] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52766bda-87de-2f46-70cb-1c3f834fb398" [ 1149.319132] env[68040]: _type = "Task" [ 1149.319132] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.334032] env[68040]: DEBUG oslo_vmware.api [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52766bda-87de-2f46-70cb-1c3f834fb398, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.829036] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1149.829332] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1149.829452] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1150.224327] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquiring lock "e54d02e5-1e98-4e9d-93e7-bcccfa3307e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1150.224555] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Lock "e54d02e5-1e98-4e9d-93e7-bcccfa3307e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.408483] env[68040]: DEBUG nova.compute.manager [req-4361d573-486d-4d39-9b3d-989566861f3b req-b7a48906-0957-4fbb-b636-e0373f943cc1 service nova] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Received event network-changed-4aa30166-b7b1-4267-933f-623ca6f467b1 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1150.408852] env[68040]: DEBUG nova.compute.manager [req-4361d573-486d-4d39-9b3d-989566861f3b req-b7a48906-0957-4fbb-b636-e0373f943cc1 service nova] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Refreshing instance network info cache due to event network-changed-4aa30166-b7b1-4267-933f-623ca6f467b1.
{{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1150.409487] env[68040]: DEBUG oslo_concurrency.lockutils [req-4361d573-486d-4d39-9b3d-989566861f3b req-b7a48906-0957-4fbb-b636-e0373f943cc1 service nova] Acquiring lock "refresh_cache-b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1150.409855] env[68040]: DEBUG oslo_concurrency.lockutils [req-4361d573-486d-4d39-9b3d-989566861f3b req-b7a48906-0957-4fbb-b636-e0373f943cc1 service nova] Acquired lock "refresh_cache-b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.410135] env[68040]: DEBUG nova.network.neutron [req-4361d573-486d-4d39-9b3d-989566861f3b req-b7a48906-0957-4fbb-b636-e0373f943cc1 service nova] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Refreshing network info cache for port 4aa30166-b7b1-4267-933f-623ca6f467b1 {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1150.672493] env[68040]: DEBUG nova.network.neutron [req-4361d573-486d-4d39-9b3d-989566861f3b req-b7a48906-0957-4fbb-b636-e0373f943cc1 service nova] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Updated VIF entry in instance network info cache for port 4aa30166-b7b1-4267-933f-623ca6f467b1. {{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1150.672751] env[68040]: DEBUG nova.network.neutron [req-4361d573-486d-4d39-9b3d-989566861f3b req-b7a48906-0957-4fbb-b636-e0373f943cc1 service nova] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Updating instance_info_cache with network_info: [{"id": "4aa30166-b7b1-4267-933f-623ca6f467b1", "address": "fa:16:3e:66:80:e9", "network": {"id": "6a93af2c-c022-43ab-933a-60ea805831b0", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-231749002-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "482f4f7b22b247e4b5c680860e0657f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f01bbee7-8b9a-46be-891e-59d8142fb359", "external-id": "nsx-vlan-transportzone-145", "segmentation_id": 145, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4aa30166-b7", "ovs_interfaceid": "4aa30166-b7b1-4267-933f-623ca6f467b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.684423] env[68040]: DEBUG oslo_concurrency.lockutils [req-4361d573-486d-4d39-9b3d-989566861f3b req-b7a48906-0957-4fbb-b636-e0373f943cc1 service nova] Releasing lock "refresh_cache-b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1151.983335] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1151.983546] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1191.900964] env[68040]: WARNING oslo_vmware.rw_handles [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1191.900964] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1191.900964] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1191.900964] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1191.900964] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1191.900964] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 1191.900964] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1191.900964] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1191.900964] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1191.900964] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1191.900964] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1191.900964] env[68040]: ERROR oslo_vmware.rw_handles [ 1191.901623] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/25d306b9-3008-4258-bb2b-1249d59d78ea/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1191.903241] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1191.903478] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Copying Virtual Disk [datastore2] vmware_temp/25d306b9-3008-4258-bb2b-1249d59d78ea/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/25d306b9-3008-4258-bb2b-1249d59d78ea/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1191.903761] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6fbf7a1a-4987-4864-9eba-3838f7309df3 {{(pid=68040) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.913027] env[68040]: DEBUG oslo_vmware.api [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Waiting for the task: (returnval){ [ 1191.913027] env[68040]: value = "task-3200250" [ 1191.913027] env[68040]: _type = "Task" [ 1191.913027] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.920894] env[68040]: DEBUG oslo_vmware.api [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Task: {'id': task-3200250, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.423412] env[68040]: DEBUG oslo_vmware.exceptions [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1192.423639] env[68040]: DEBUG oslo_concurrency.lockutils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1192.424203] env[68040]: ERROR nova.compute.manager [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1192.424203] env[68040]: Faults: ['InvalidArgument'] [ 1192.424203] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Traceback (most recent call last): [ 1192.424203] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1192.424203] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] yield resources [ 1192.424203] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1192.424203] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] self.driver.spawn(context, instance, image_meta, [ 1192.424203] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1192.424203] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1192.424203] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1192.424203] env[68040]: ERROR nova.compute.manager [instance: 
a89ff564-ea35-4000-8efa-2c1ec2b61759] self._fetch_image_if_missing(context, vi) [ 1192.424203] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1192.424634] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] image_cache(vi, tmp_image_ds_loc) [ 1192.424634] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1192.424634] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] vm_util.copy_virtual_disk( [ 1192.424634] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1192.424634] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] session._wait_for_task(vmdk_copy_task) [ 1192.424634] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1192.424634] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] return self.wait_for_task(task_ref) [ 1192.424634] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1192.424634] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] return evt.wait() [ 1192.424634] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1192.424634] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] result = hub.switch() [ 1192.424634] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1192.424634] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] return self.greenlet.switch() [ 1192.425181] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1192.425181] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] self.f(*self.args, **self.kw) [ 1192.425181] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1192.425181] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] raise exceptions.translate_fault(task_info.error) [ 1192.425181] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1192.425181] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Faults: ['InvalidArgument'] [ 1192.425181] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] [ 1192.425181] env[68040]: INFO nova.compute.manager 
[None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Terminating instance [ 1192.426058] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.426281] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1192.426515] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-242acca5-64b3-484e-abd2-da5191404131 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.428662] env[68040]: DEBUG nova.compute.manager [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1192.428856] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1192.429595] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5736d158-3ad7-499f-a656-7e7a33ebde82 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.436692] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1192.436850] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-63b83cd9-c48d-4848-ba05-7bdeae4402fa {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.438940] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1192.439109] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1192.440128] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-767e01d8-a707-4472-88c9-b3fc78ab7d8b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.444799] env[68040]: DEBUG oslo_vmware.api [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Waiting for the task: (returnval){ [ 1192.444799] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52c56478-a2a7-d584-3651-04377c4143f8" [ 1192.444799] env[68040]: _type = "Task" [ 1192.444799] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.452980] env[68040]: DEBUG oslo_vmware.api [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52c56478-a2a7-d584-3651-04377c4143f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.522323] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1192.522542] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1192.522725] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Deleting the datastore file [datastore2] a89ff564-ea35-4000-8efa-2c1ec2b61759 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1192.522996] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-448d012c-2149-4d5e-a6e2-2cf3e73cbca8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.528620] env[68040]: DEBUG oslo_vmware.api [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Waiting for the task: (returnval){ [ 1192.528620] env[68040]: value = "task-3200252" [ 1192.528620] env[68040]: _type = "Task" [ 1192.528620] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.536057] env[68040]: DEBUG oslo_vmware.api [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Task: {'id': task-3200252, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.955023] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1192.955349] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Creating directory with path [datastore2] vmware_temp/512953c3-553a-4ee5-8a96-7c14be80e78e/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1192.955573] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-126f5693-184f-4cc1-af64-10b9bc90bcb0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.967294] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Created directory with path [datastore2] vmware_temp/512953c3-553a-4ee5-8a96-7c14be80e78e/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1192.967489] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Fetch image to [datastore2] vmware_temp/512953c3-553a-4ee5-8a96-7c14be80e78e/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1192.967662] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/512953c3-553a-4ee5-8a96-7c14be80e78e/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1192.968440] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5561c258-e5b2-4ba1-b13c-f6653fb25c01 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.974851] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2588d2ac-6b69-44d7-9808-408b12cc6c7a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.983633] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eedd681e-38bc-43ed-9272-e65e7553d5ee {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.014342] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adcf6663-1572-468c-b42d-a830a9244645 {{(pid=68040) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.020427] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c46bfc28-7467-4827-b2bd-d16416a1dbce {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.037030] env[68040]: DEBUG oslo_vmware.api [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Task: {'id': task-3200252, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069461} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.037269] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1193.037458] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1193.037631] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1193.037809] env[68040]: INFO nova.compute.manager [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Took 0.61 seconds to destroy the instance on the hypervisor. 
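The DeleteDatastoreFile_Task entries above follow oslo.vmware's general task contract: a vSphere method returns a Task object, and the caller polls the task's info.state until it reaches success or error, translating a vSphere fault into a Python exception on failure. A minimal sketch of that polling loop, assuming a simplified session.get_task_info() accessor (the real loop in oslo_vmware/api.py runs inside a looping call, as the _poll_task frames in the traceback above show):

    import time

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(session, task_ref, interval=0.5):
        while True:
            info = session.get_task_info(task_ref)  # hypothetical accessor
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # A vSphere fault such as "A specified parameter was not
                # correct: fileType" / Faults: ['InvalidArgument'] is raised
                # to the caller here, as in the copy_virtual_disk traceback
                # earlier in this section.
                raise VimFaultException(info.error)
            time.sleep(interval)  # 'queued'/'running': keep polling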
[ 1193.039978] env[68040]: DEBUG nova.compute.claims [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1193.040171] env[68040]: DEBUG oslo_concurrency.lockutils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1193.040389] env[68040]: DEBUG oslo_concurrency.lockutils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1193.044412] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1193.129885] env[68040]: DEBUG oslo_vmware.rw_handles [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/512953c3-553a-4ee5-8a96-7c14be80e78e/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1193.192648] env[68040]: DEBUG oslo_vmware.rw_handles [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1193.192648] env[68040]: DEBUG oslo_vmware.rw_handles [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/512953c3-553a-4ee5-8a96-7c14be80e78e/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1193.489047] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674e3d39-a51a-45e3-a34e-86d2f44bdf8e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.495973] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd7c63ad-c6b0-4e81-a760-7056bf1258c1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.525616] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3874ed84-2588-4c6d-837e-d2bccbd9047d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.532776] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a25570c-bb27-482a-b298-999264d0e168 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.545968] env[68040]: DEBUG nova.compute.provider_tree [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1193.555890] env[68040]: DEBUG nova.scheduler.client.report [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1193.576302] env[68040]: DEBUG oslo_concurrency.lockutils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.536s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1193.576834] env[68040]: ERROR nova.compute.manager [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1193.576834] env[68040]: Faults: ['InvalidArgument'] [ 1193.576834] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Traceback (most recent call last): [ 1193.576834] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1193.576834] 
env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] self.driver.spawn(context, instance, image_meta, [ 1193.576834] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1193.576834] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1193.576834] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1193.576834] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] self._fetch_image_if_missing(context, vi) [ 1193.576834] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1193.576834] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] image_cache(vi, tmp_image_ds_loc) [ 1193.576834] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1193.577333] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] vm_util.copy_virtual_disk( [ 1193.577333] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1193.577333] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] session._wait_for_task(vmdk_copy_task) [ 1193.577333] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1193.577333] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] return self.wait_for_task(task_ref) [ 1193.577333] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1193.577333] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] return evt.wait() [ 1193.577333] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1193.577333] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] result = hub.switch() [ 1193.577333] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1193.577333] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] return self.greenlet.switch() [ 1193.577333] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1193.577333] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] self.f(*self.args, **self.kw) [ 1193.577852] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1193.577852] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] raise exceptions.translate_fault(task_info.error) [ 1193.577852] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1193.577852] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Faults: ['InvalidArgument'] [ 1193.577852] env[68040]: ERROR nova.compute.manager [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] [ 1193.577852] env[68040]: DEBUG nova.compute.utils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1193.578989] env[68040]: DEBUG nova.compute.manager [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Build of instance a89ff564-ea35-4000-8efa-2c1ec2b61759 was re-scheduled: A specified parameter was not correct: fileType [ 1193.578989] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1193.579453] env[68040]: DEBUG nova.compute.manager [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1193.579632] env[68040]: DEBUG nova.compute.manager [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1193.579807] env[68040]: DEBUG nova.compute.manager [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1193.579971] env[68040]: DEBUG nova.network.neutron [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1194.078443] env[68040]: DEBUG nova.network.neutron [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.088231] env[68040]: INFO nova.compute.manager [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Took 0.51 seconds to deallocate network for instance. [ 1194.201302] env[68040]: INFO nova.scheduler.client.report [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Deleted allocations for instance a89ff564-ea35-4000-8efa-2c1ec2b61759 [ 1194.247691] env[68040]: DEBUG oslo_concurrency.lockutils [None req-96b77e52-a74b-46ad-88a2-9b0c8b768f8e tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Lock "a89ff564-ea35-4000-8efa-2c1ec2b61759" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 525.912s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.249148] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d36c7935-42dc-4ea6-b86f-4f1a3e27c6d1 tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Lock "a89ff564-ea35-4000-8efa-2c1ec2b61759" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 327.673s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.249404] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d36c7935-42dc-4ea6-b86f-4f1a3e27c6d1 tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Acquiring lock "a89ff564-ea35-4000-8efa-2c1ec2b61759-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.249623] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d36c7935-42dc-4ea6-b86f-4f1a3e27c6d1 tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Lock "a89ff564-ea35-4000-8efa-2c1ec2b61759-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.249796] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d36c7935-42dc-4ea6-b86f-4f1a3e27c6d1 tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Lock "a89ff564-ea35-4000-8efa-2c1ec2b61759-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.253321] env[68040]: INFO nova.compute.manager [None req-d36c7935-42dc-4ea6-b86f-4f1a3e27c6d1 tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Terminating instance [ 1194.255141] env[68040]: DEBUG nova.compute.manager [None req-d36c7935-42dc-4ea6-b86f-4f1a3e27c6d1 tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1194.255346] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d36c7935-42dc-4ea6-b86f-4f1a3e27c6d1 tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1194.255812] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-155e022a-82f2-4251-8578-1332ed6eb2fa {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.267173] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c9a6de-626e-4c76-88e1-d0e2b711cdf6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.281237] env[68040]: DEBUG nova.compute.manager [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1194.301771] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-d36c7935-42dc-4ea6-b86f-4f1a3e27c6d1 tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a89ff564-ea35-4000-8efa-2c1ec2b61759 could not be found. 
[ 1194.301771] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d36c7935-42dc-4ea6-b86f-4f1a3e27c6d1 tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1194.302353] env[68040]: INFO nova.compute.manager [None req-d36c7935-42dc-4ea6-b86f-4f1a3e27c6d1 tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1194.302353] env[68040]: DEBUG oslo.service.loopingcall [None req-d36c7935-42dc-4ea6-b86f-4f1a3e27c6d1 tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1194.302482] env[68040]: DEBUG nova.compute.manager [-] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1194.302522] env[68040]: DEBUG nova.network.neutron [-] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1194.330579] env[68040]: DEBUG nova.network.neutron [-] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.337776] env[68040]: INFO nova.compute.manager [-] [instance: a89ff564-ea35-4000-8efa-2c1ec2b61759] Took 0.04 seconds to deallocate network for instance. 
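The "Waiting for function ..._deallocate_network_with_retries to return" entry above is oslo.service's looping-call machinery: the network deallocation runs on a timer and the caller blocks until the function signals completion. A minimal sketch with FixedIntervalLoopingCall (the interval is illustrative, and Nova's real wrapper adds retries with back-off around the Neutron call):

    from oslo_service import loopingcall

    def _deallocate_network_with_retries():
        # call neutron's deallocate_for_instance() here (stubbed out);
        # raising LoopingCallDone stops the timer once it succeeds
        raise loopingcall.LoopingCallDone()

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
    timer.start(interval=1).wait()   # blocks, like the log's "Waiting for function"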
[ 1194.343027] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1194.343156] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1194.344483] env[68040]: INFO nova.compute.claims [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1194.493216] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d36c7935-42dc-4ea6-b86f-4f1a3e27c6d1 tempest-AttachInterfacesV270Test-1668457507 tempest-AttachInterfacesV270Test-1668457507-project-member] Lock "a89ff564-ea35-4000-8efa-2c1ec2b61759" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.244s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.698235] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86308f27-6e01-439e-8663-5fca74d48ca6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.704651] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f62ec1-7352-46bf-a416-92cd88b3ea89 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.735885] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e53b02-e257-45b5-a060-4af6e3cf7a80 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.743319] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58089e38-c292-4852-86f5-572e16e500d2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.756323] env[68040]: DEBUG nova.compute.provider_tree [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1194.765061] env[68040]: DEBUG nova.scheduler.client.report [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1194.786074] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.443s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1194.786559] env[68040]: DEBUG nova.compute.manager [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1194.824484] env[68040]: DEBUG nova.compute.utils [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1194.825692] env[68040]: DEBUG nova.compute.manager [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1194.825865] env[68040]: DEBUG nova.network.neutron [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1194.837482] env[68040]: DEBUG nova.compute.manager [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1194.913608] env[68040]: DEBUG nova.compute.manager [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Start spawning the instance on the hypervisor. 
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1194.945957] env[68040]: DEBUG nova.policy [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10b9790be591419c9e44760b96441006', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '605d49f1f3ec48388a1deb9e7b0a0d09', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 1194.958134] env[68040]: DEBUG nova.virt.hardware [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1194.958404] env[68040]: DEBUG nova.virt.hardware [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1194.958565] env[68040]: DEBUG nova.virt.hardware [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1194.958748] env[68040]: DEBUG nova.virt.hardware [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1194.958896] env[68040]: DEBUG nova.virt.hardware [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1194.959081] env[68040]: DEBUG nova.virt.hardware [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1194.959323] env[68040]: 
DEBUG nova.virt.hardware [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1194.959494] env[68040]: DEBUG nova.virt.hardware [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1194.959668] env[68040]: DEBUG nova.virt.hardware [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1194.959830] env[68040]: DEBUG nova.virt.hardware [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1194.960010] env[68040]: DEBUG nova.virt.hardware [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1194.960874] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f25767f2-1e64-4b02-b26c-34a98023a0b9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.968995] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf984bd1-a61e-4b44-b102-9ad6c4c110d3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.316923] env[68040]: DEBUG nova.network.neutron [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Successfully created port: 6bd4708c-2cbe-4ed8-b73d-9bc392f7c12c {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1196.009219] env[68040]: DEBUG nova.network.neutron [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Successfully updated port: 6bd4708c-2cbe-4ed8-b73d-9bc392f7c12c {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1196.033732] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Acquiring lock "refresh_cache-e92b662c-b458-49d8-ac2a-00ae6046a11b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1196.033891] env[68040]: DEBUG oslo_concurrency.lockutils [None 
req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Acquired lock "refresh_cache-e92b662c-b458-49d8-ac2a-00ae6046a11b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.034061] env[68040]: DEBUG nova.network.neutron [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1196.078495] env[68040]: DEBUG nova.network.neutron [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1196.094069] env[68040]: DEBUG nova.compute.manager [req-5dbea7b4-4bc5-4be9-b916-9eaaf3424ae6 req-0a234c6a-650d-4b13-acd5-30920edc2037 service nova] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Received event network-vif-plugged-6bd4708c-2cbe-4ed8-b73d-9bc392f7c12c {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1196.094302] env[68040]: DEBUG oslo_concurrency.lockutils [req-5dbea7b4-4bc5-4be9-b916-9eaaf3424ae6 req-0a234c6a-650d-4b13-acd5-30920edc2037 service nova] Acquiring lock "e92b662c-b458-49d8-ac2a-00ae6046a11b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1196.094509] env[68040]: DEBUG oslo_concurrency.lockutils [req-5dbea7b4-4bc5-4be9-b916-9eaaf3424ae6 req-0a234c6a-650d-4b13-acd5-30920edc2037 service nova] Lock "e92b662c-b458-49d8-ac2a-00ae6046a11b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1196.094673] env[68040]: DEBUG oslo_concurrency.lockutils [req-5dbea7b4-4bc5-4be9-b916-9eaaf3424ae6 req-0a234c6a-650d-4b13-acd5-30920edc2037 service nova] Lock "e92b662c-b458-49d8-ac2a-00ae6046a11b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1196.094838] env[68040]: DEBUG nova.compute.manager [req-5dbea7b4-4bc5-4be9-b916-9eaaf3424ae6 req-0a234c6a-650d-4b13-acd5-30920edc2037 service nova] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] No waiting events found dispatching network-vif-plugged-6bd4708c-2cbe-4ed8-b73d-9bc392f7c12c {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1196.094999] env[68040]: WARNING nova.compute.manager [req-5dbea7b4-4bc5-4be9-b916-9eaaf3424ae6 req-0a234c6a-650d-4b13-acd5-30920edc2037 service nova] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Received unexpected event network-vif-plugged-6bd4708c-2cbe-4ed8-b73d-9bc392f7c12c for instance with vm_state building and task_state spawning. 
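The network-vif-plugged entries above show Nova's external-event dispatch: the building thread can register a waiter keyed by (event name, port id), and the Neutron notification pops and fires it; when nothing is registered yet, the event is logged as unexpected, exactly as in the WARNING above. A minimal keyed-event sketch, assuming plain threading in place of Nova's eventlet-based InstanceEvents:

    import threading

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()
            self._events = {}               # {instance_uuid: {event_key: Event}}

        def prepare_for_instance_event(self, uuid, key):
            # called by the builder before it starts waiting
            with self._lock:
                return self._events.setdefault(uuid, {}).setdefault(
                    key, threading.Event())

        def pop_instance_event(self, uuid, key):
            # called from the external-event handler (Neutron notification)
            with self._lock:
                event = self._events.get(uuid, {}).pop(key, None)
            if event is None:
                return False                # "No waiting events found dispatching ..."
            event.set()                     # wake the waiting builder
            return True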
[ 1196.095331] env[68040]: DEBUG nova.compute.manager [req-5dbea7b4-4bc5-4be9-b916-9eaaf3424ae6 req-0a234c6a-650d-4b13-acd5-30920edc2037 service nova] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Received event network-changed-6bd4708c-2cbe-4ed8-b73d-9bc392f7c12c {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1196.095498] env[68040]: DEBUG nova.compute.manager [req-5dbea7b4-4bc5-4be9-b916-9eaaf3424ae6 req-0a234c6a-650d-4b13-acd5-30920edc2037 service nova] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Refreshing instance network info cache due to event network-changed-6bd4708c-2cbe-4ed8-b73d-9bc392f7c12c. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1196.095666] env[68040]: DEBUG oslo_concurrency.lockutils [req-5dbea7b4-4bc5-4be9-b916-9eaaf3424ae6 req-0a234c6a-650d-4b13-acd5-30920edc2037 service nova] Acquiring lock "refresh_cache-e92b662c-b458-49d8-ac2a-00ae6046a11b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1196.260612] env[68040]: DEBUG nova.network.neutron [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Updating instance_info_cache with network_info: [{"id": "6bd4708c-2cbe-4ed8-b73d-9bc392f7c12c", "address": "fa:16:3e:06:f0:f4", "network": {"id": "bfbde9a5-19e7-4423-8f0a-00ce13a290b6", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-115768575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d49f1f3ec48388a1deb9e7b0a0d09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bd4708c-2c", "ovs_interfaceid": "6bd4708c-2cbe-4ed8-b73d-9bc392f7c12c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.272413] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Releasing lock "refresh_cache-e92b662c-b458-49d8-ac2a-00ae6046a11b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1196.272701] env[68040]: DEBUG nova.compute.manager [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Instance network_info: |[{"id": "6bd4708c-2cbe-4ed8-b73d-9bc392f7c12c", "address": "fa:16:3e:06:f0:f4", "network": {"id": "bfbde9a5-19e7-4423-8f0a-00ce13a290b6", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-115768575-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d49f1f3ec48388a1deb9e7b0a0d09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bd4708c-2c", "ovs_interfaceid": "6bd4708c-2cbe-4ed8-b73d-9bc392f7c12c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1196.272994] env[68040]: DEBUG oslo_concurrency.lockutils [req-5dbea7b4-4bc5-4be9-b916-9eaaf3424ae6 req-0a234c6a-650d-4b13-acd5-30920edc2037 service nova] Acquired lock "refresh_cache-e92b662c-b458-49d8-ac2a-00ae6046a11b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.273194] env[68040]: DEBUG nova.network.neutron [req-5dbea7b4-4bc5-4be9-b916-9eaaf3424ae6 req-0a234c6a-650d-4b13-acd5-30920edc2037 service nova] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Refreshing network info cache for port 6bd4708c-2cbe-4ed8-b73d-9bc392f7c12c {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1196.274240] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:f0:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b56036cd-97ac-47f5-9089-7b38bfe99228', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6bd4708c-2cbe-4ed8-b73d-9bc392f7c12c', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1196.281737] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Creating folder: Project (605d49f1f3ec48388a1deb9e7b0a0d09). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1196.282527] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c172670-419e-462f-8214-21cc679ec69c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.297186] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Created folder: Project (605d49f1f3ec48388a1deb9e7b0a0d09) in parent group-v639956. [ 1196.297342] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Creating folder: Instances. 
Parent ref: group-v640023. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1196.297573] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e68e427e-b817-4cce-82d9-ba5651a9a18f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.306483] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Created folder: Instances in parent group-v640023. [ 1196.306708] env[68040]: DEBUG oslo.service.loopingcall [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1196.306889] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1196.307095] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4d26444-8fc8-40b8-8752-4b9b5ce68235 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.333296] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1196.333296] env[68040]: value = "task-3200255" [ 1196.333296] env[68040]: _type = "Task" [ 1196.333296] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.341040] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200255, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.736037] env[68040]: DEBUG nova.network.neutron [req-5dbea7b4-4bc5-4be9-b916-9eaaf3424ae6 req-0a234c6a-650d-4b13-acd5-30920edc2037 service nova] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Updated VIF entry in instance network info cache for port 6bd4708c-2cbe-4ed8-b73d-9bc392f7c12c. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1196.736501] env[68040]: DEBUG nova.network.neutron [req-5dbea7b4-4bc5-4be9-b916-9eaaf3424ae6 req-0a234c6a-650d-4b13-acd5-30920edc2037 service nova] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Updating instance_info_cache with network_info: [{"id": "6bd4708c-2cbe-4ed8-b73d-9bc392f7c12c", "address": "fa:16:3e:06:f0:f4", "network": {"id": "bfbde9a5-19e7-4423-8f0a-00ce13a290b6", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-115768575-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "605d49f1f3ec48388a1deb9e7b0a0d09", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b56036cd-97ac-47f5-9089-7b38bfe99228", "external-id": "nsx-vlan-transportzone-301", "segmentation_id": 301, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bd4708c-2c", "ovs_interfaceid": "6bd4708c-2cbe-4ed8-b73d-9bc392f7c12c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.749679] env[68040]: DEBUG oslo_concurrency.lockutils [req-5dbea7b4-4bc5-4be9-b916-9eaaf3424ae6 req-0a234c6a-650d-4b13-acd5-30920edc2037 service nova] Releasing lock "refresh_cache-e92b662c-b458-49d8-ac2a-00ae6046a11b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1196.844084] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200255, 'name': CreateVM_Task, 'duration_secs': 0.300625} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.844269] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1196.844927] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1196.845111] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.845427] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1196.845678] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f27d9841-9a16-4545-a7e9-24a2004f87f8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.851801] env[68040]: DEBUG oslo_vmware.api [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Waiting for the task: (returnval){ [ 1196.851801] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52476406-3bd1-4801-21ba-8b4267100d3d" [ 1196.851801] env[68040]: _type = "Task" [ 1196.851801] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.859348] env[68040]: DEBUG oslo_vmware.api [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52476406-3bd1-4801-21ba-8b4267100d3d, 'name': SearchDatastore_Task} progress is 0%. 
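[annotation] The "Waiting for the task", "progress is 0%", and "completed successfully" records above are oslo.vmware's client-side task polling. A minimal sketch of that pattern, assuming a reachable vCenter; the endpoint, credentials, and intervals below are placeholders, not values from this deployment:

    from oslo_vmware import api, vim_util

    # Placeholder endpoint and credentials; a real deployment reads these
    # from nova.conf ([vmware] section).
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10,
                                   task_poll_interval=0.5)

    # The PropertyCollector.RetrievePropertiesEx records in this log map to
    # object/property reads like this one (first page of VMs, up to 100).
    vms = session.invoke_api(vim_util, 'get_objects', session.vim,
                             'VirtualMachine', 100)

    # Mutating calls (CreateVM_Task, CopyVirtualDisk_Task, ...) return a
    # Task moref instead; session.wait_for_task(task) polls it, emitting
    # the 'progress is N%' debug lines, until it succeeds or raises.
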
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.361625] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1197.361936] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1197.362099] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1203.247402] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "4a08d3e3-5e84-4f34-b418-2c18eadbef25" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.247730] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "4a08d3e3-5e84-4f34-b418-2c18eadbef25" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.984955] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1205.998028] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.998172] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.998382] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.998543] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1205.999712] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db6881e6-664f-4470-8e1f-f28b10fd8673 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.008811] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5108473-e977-4eb6-a93a-18f1d84e72b9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.023817] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099e7426-6a8f-46c6-b4c5-9b6b7284211b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.030037] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcdfa418-0e2e-48c6-be3d-111b13f3eafd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.058321] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180934MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1206.058478] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1206.058674] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1206.260150] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 42f39352-e703-4ebf-9559-4c8b5abca70e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.260359] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d1819f29-a891-47dd-a456-8f3b127daf6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.260518] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1812f13e-b03d-48d4-940a-43974784265b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.260652] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance de1b8ef9-0088-4d2a-985e-d04fcff55d31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.260777] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance bce68a2b-260c-45cc-ac98-d4b01b4513a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.260897] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1e43f6be-f6a3-4569-adea-c82a5d709247 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.261025] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3738de32-79cd-4b04-8081-cc1146730c75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.261152] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.261271] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.261386] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e92b662c-b458-49d8-ac2a-00ae6046a11b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1206.274628] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 57cd94c2-aec3-427e-9b9f-a444fe291974 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.286037] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 87a7851e-d6fe-481a-8abb-5732e281cb64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.296973] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance bd14d08b-d71a-43b0-b72a-6504dc0e2142 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.306576] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 00305041-c0c0-4b7b-9149-9bcba4392279 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.316828] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 6011cb9b-e882-4eb2-96b4-82a43585acbc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.327483] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 81cfab4f-6a32-42b0-bbfc-45596bc9ad4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.336981] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f6edf79d-5eff-4e2c-94d2-aa5cf1731748 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.346061] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c581d685-7ea0-41f8-b911-ff1dce1b46c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.355167] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 310e3ad1-aa4c-44d1-b1e9-152d1de39125 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.365292] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 72f5f3ba-c931-40a5-ab73-4e6738e0aaba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.374524] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c2f603dd-6a9b-4a0f-b50a-263cf8eb70af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.383900] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance ec66dda9-4e56-4baa-b8aa-8b01f28d8e9c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.392884] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8535d103-7bdf-4210-aa1e-180bb100de5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.405088] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 462c8f10-1dda-4687-946c-fb40c3e4f049 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.414661] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e54d02e5-1e98-4e9d-93e7-bcccfa3307e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.424031] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4a08d3e3-5e84-4f34-b418-2c18eadbef25 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1206.424269] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1206.424422] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1206.716275] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cdb758d-c55f-4d81-8476-c1e6d0d5b895 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.724274] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7397ffd-fb8d-48d9-9c2d-b692d9a337a5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.754429] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a72cbf2-2a7c-47b9-9700-403dcb83277f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.761947] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ba24ac-ef6a-4d84-aa9e-c1bc20330428 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.774459] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1206.783380] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1206.799708] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1206.799894] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.741s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.800122] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1206.800262] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Cleaning up deleted instances with incomplete migration {{(pid=68040) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1207.807619] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1207.807986] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1207.983587] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1207.983810] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1208.983955] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1208.984232] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1208.984269] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1209.009224] env[68040]: DEBUG nova.compute.manager [None 
req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1209.009387] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1209.009521] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1209.009693] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1209.009839] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1209.010090] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1209.010309] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1209.010449] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1209.010570] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1209.010690] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1209.010811] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. 
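[annotation] The audit above reports inventory to placement; usable capacity per resource class follows capacity = int((total - reserved) * allocation_ratio), assuming the standard placement formula applies here. A worked check against the VCPU figures from this pass (48 physical, ratio 4.0, 10 allocated):

    # Numbers taken from the inventory and 'Final resource view' records above.
    total, reserved, ratio = 48, 0, 4.0
    capacity = int((total - reserved) * ratio)  # schedulable VCPU units
    used = 10                                   # 'total allocated vcpus: 10'
    assert capacity == 192 and capacity - used == 182
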
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1209.983761] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.984012] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1209.984262] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Cleaning up deleted instances {{(pid=68040) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1209.994213] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] There are 0 instances to clean {{(pid=68040) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1210.984388] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1210.984706] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1211.006928] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1211.146915] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1211.166909] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Getting list of instances from cluster (obj){ [ 1211.166909] env[68040]: value = "domain-c8" [ 1211.166909] env[68040]: _type = "ClusterComputeResource" [ 1211.166909] env[68040]: } {{(pid=68040) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1211.168665] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd3e66f-f749-4c5c-a1bf-1e93e35d0ce2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.185282] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Got total of 10 instances {{(pid=68040) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1211.185450] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Triggering sync for uuid 42f39352-e703-4ebf-9559-4c8b5abca70e {{(pid=68040) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1211.185640] env[68040]: DEBUG nova.compute.manager [None 
req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Triggering sync for uuid d1819f29-a891-47dd-a456-8f3b127daf6f {{(pid=68040) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1211.185800] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Triggering sync for uuid 1812f13e-b03d-48d4-940a-43974784265b {{(pid=68040) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1211.185951] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Triggering sync for uuid de1b8ef9-0088-4d2a-985e-d04fcff55d31 {{(pid=68040) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1211.186139] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Triggering sync for uuid bce68a2b-260c-45cc-ac98-d4b01b4513a4 {{(pid=68040) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1211.186314] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Triggering sync for uuid 1e43f6be-f6a3-4569-adea-c82a5d709247 {{(pid=68040) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1211.186464] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Triggering sync for uuid 3738de32-79cd-4b04-8081-cc1146730c75 {{(pid=68040) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1211.186701] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Triggering sync for uuid e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd {{(pid=68040) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1211.186869] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Triggering sync for uuid b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77 {{(pid=68040) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1211.187053] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Triggering sync for uuid e92b662c-b458-49d8-ac2a-00ae6046a11b {{(pid=68040) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1211.187489] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "42f39352-e703-4ebf-9559-4c8b5abca70e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.187836] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "d1819f29-a891-47dd-a456-8f3b127daf6f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.188206] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "1812f13e-b03d-48d4-940a-43974784265b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.188531] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "de1b8ef9-0088-4d2a-985e-d04fcff55d31" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.188870] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "bce68a2b-260c-45cc-ac98-d4b01b4513a4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.189191] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "1e43f6be-f6a3-4569-adea-c82a5d709247" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.189459] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "3738de32-79cd-4b04-8081-cc1146730c75" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.189716] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.189865] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1211.190074] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "e92b662c-b458-49d8-ac2a-00ae6046a11b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.984079] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.984458] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1243.041253] env[68040]: WARNING oslo_vmware.rw_handles [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1243.041253] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1243.041253] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1243.041253] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1243.041253] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1243.041253] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 1243.041253] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1243.041253] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1243.041253] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1243.041253] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1243.041253] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1243.041253] env[68040]: ERROR oslo_vmware.rw_handles [ 1243.041800] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/512953c3-553a-4ee5-8a96-7c14be80e78e/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1243.043698] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1243.043985] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Copying Virtual Disk [datastore2] vmware_temp/512953c3-553a-4ee5-8a96-7c14be80e78e/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/512953c3-553a-4ee5-8a96-7c14be80e78e/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1243.044229] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fb6390d9-cd04-4fc1-80ef-d5e644943677 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.051587] env[68040]: DEBUG oslo_vmware.api [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Waiting for the task: (returnval){ [ 
1243.051587] env[68040]: value = "task-3200256" [ 1243.051587] env[68040]: _type = "Task" [ 1243.051587] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.059816] env[68040]: DEBUG oslo_vmware.api [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Task: {'id': task-3200256, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.561518] env[68040]: DEBUG oslo_vmware.exceptions [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1243.561815] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1243.562366] env[68040]: ERROR nova.compute.manager [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1243.562366] env[68040]: Faults: ['InvalidArgument'] [ 1243.562366] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Traceback (most recent call last): [ 1243.562366] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1243.562366] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] yield resources [ 1243.562366] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1243.562366] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] self.driver.spawn(context, instance, image_meta, [ 1243.562366] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1243.562366] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1243.562366] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1243.562366] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] self._fetch_image_if_missing(context, vi) [ 1243.562366] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1243.562737] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] image_cache(vi, 
tmp_image_ds_loc) [ 1243.562737] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1243.562737] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] vm_util.copy_virtual_disk( [ 1243.562737] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1243.562737] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] session._wait_for_task(vmdk_copy_task) [ 1243.562737] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1243.562737] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] return self.wait_for_task(task_ref) [ 1243.562737] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1243.562737] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] return evt.wait() [ 1243.562737] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1243.562737] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] result = hub.switch() [ 1243.562737] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1243.562737] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] return self.greenlet.switch() [ 1243.563128] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1243.563128] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] self.f(*self.args, **self.kw) [ 1243.563128] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1243.563128] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] raise exceptions.translate_fault(task_info.error) [ 1243.563128] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1243.563128] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Faults: ['InvalidArgument'] [ 1243.563128] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] [ 1243.563128] env[68040]: INFO nova.compute.manager [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Terminating instance [ 1243.564885] env[68040]: DEBUG oslo_concurrency.lockutils [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 
tempest-ListImageFiltersTestJSON-22395781-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.565131] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1243.565653] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Acquiring lock "refresh_cache-42f39352-e703-4ebf-9559-4c8b5abca70e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1243.565810] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Acquired lock "refresh_cache-42f39352-e703-4ebf-9559-4c8b5abca70e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1243.565973] env[68040]: DEBUG nova.network.neutron [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1243.566937] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b23ed563-df13-45ca-88a9-b4bb08502366 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.576374] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1243.576563] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1243.577539] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f33621e5-0b53-4e71-b2aa-000fa3f45373 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.582919] env[68040]: DEBUG oslo_vmware.api [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Waiting for the task: (returnval){ [ 1243.582919] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5200da1a-8cdf-f326-7e54-dbd987ec7ee6" [ 1243.582919] env[68040]: _type = "Task" [ 1243.582919] env[68040]: } to complete. 
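[annotation] In the traceback further up, wait_for_task translated the failed CopyVirtualDisk_Task into a generic VimFaultException after get_fault_class found no specific subclass ("Fault InvalidArgument not matched"). A hedged sketch of inspecting that exception at a call site; wait_checked and its arguments are illustrative, not Nova code:

    from oslo_vmware import exceptions as vexc

    def wait_checked(session, task):
        """Wait for a vSphere task, surfacing fault names on failure."""
        try:
            return session.wait_for_task(task)
        except vexc.VimFaultException as exc:
            # fault_list carries the raw vSphere fault names, e.g.
            # ['InvalidArgument']; the message holds the server text
            # ('A specified parameter was not correct: fileType').
            if 'InvalidArgument' in exc.fault_list:
                pass  # caller-specific handling would go here
            raise
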
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.591769] env[68040]: DEBUG oslo_vmware.api [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5200da1a-8cdf-f326-7e54-dbd987ec7ee6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.593787] env[68040]: DEBUG nova.network.neutron [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1243.656618] env[68040]: DEBUG nova.network.neutron [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1243.665511] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Releasing lock "refresh_cache-42f39352-e703-4ebf-9559-4c8b5abca70e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1243.665892] env[68040]: DEBUG nova.compute.manager [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Start destroying the instance on the hypervisor. 
{{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1243.666099] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1243.667252] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b413d1ba-fa25-4f11-b4e5-1785e9d38cbc {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.674766] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1243.674984] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1600dfc-d2b9-4069-9fe5-dea23514c708 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.712585] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1243.712772] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1243.712944] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Deleting the datastore file [datastore2] 42f39352-e703-4ebf-9559-4c8b5abca70e {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1243.713219] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4bdea027-6529-4a56-93b7-4e027ccbaa1c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.723093] env[68040]: DEBUG oslo_vmware.api [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Waiting for the task: (returnval){ [ 1243.723093] env[68040]: value = "task-3200258" [ 1243.723093] env[68040]: _type = "Task" [ 1243.723093] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.731905] env[68040]: DEBUG oslo_vmware.api [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Task: {'id': task-3200258, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.092827] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1244.093159] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Creating directory with path [datastore2] vmware_temp/84e89c70-6f55-40c4-8835-4138b2b7756c/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1244.093344] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0094ad2-cdbf-42c6-bee9-61f6694c83e2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.104755] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Created directory with path [datastore2] vmware_temp/84e89c70-6f55-40c4-8835-4138b2b7756c/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1244.104962] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Fetch image to [datastore2] vmware_temp/84e89c70-6f55-40c4-8835-4138b2b7756c/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1244.105160] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/84e89c70-6f55-40c4-8835-4138b2b7756c/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1244.105899] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf3b0b7-8222-4c18-8363-fca90f7339e4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.112457] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da5cca7e-ef1f-4c99-a759-aa070246d13a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.121196] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61745cbd-53d1-467e-b54a-2f154cd8d7ba {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.150989] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9e68dde1-95eb-4e80-8934-b063ada2c229 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.157191] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-bd104522-bcbb-461f-9019-ea4b67106f65 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.178917] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1244.232283] env[68040]: DEBUG oslo_vmware.api [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Task: {'id': task-3200258, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.031926} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.232544] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1244.232772] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1244.232977] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1244.233162] env[68040]: INFO nova.compute.manager [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Took 0.57 seconds to destroy the instance on the hypervisor. [ 1244.233376] env[68040]: DEBUG oslo.service.loopingcall [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1244.233578] env[68040]: DEBUG nova.compute.manager [-] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1244.235860] env[68040]: DEBUG nova.compute.claims [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1244.236023] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1244.236251] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1244.347645] env[68040]: DEBUG oslo_concurrency.lockutils [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1244.349877] env[68040]: ERROR nova.compute.manager [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 8c308313-03d5-40b6-a5fe-9037e32dc76e. 
[ 1244.349877] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Traceback (most recent call last): [ 1244.349877] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1244.349877] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1244.349877] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1244.349877] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] result = getattr(controller, method)(*args, **kwargs) [ 1244.349877] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1244.349877] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self._get(image_id) [ 1244.349877] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1244.349877] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1244.349877] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1244.350265] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] resp, body = self.http_client.get(url, headers=header) [ 1244.350265] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1244.350265] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self.request(url, 'GET', **kwargs) [ 1244.350265] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1244.350265] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self._handle_response(resp) [ 1244.350265] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1244.350265] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] raise exc.from_response(resp, resp.content) [ 1244.350265] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1244.350265] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] [ 1244.350265] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] During handling of the above exception, another exception occurred: [ 1244.350265] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] [ 1244.350265] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Traceback (most recent call last): [ 1244.350570] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1244.350570] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] yield resources [ 1244.350570] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1244.350570] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] self.driver.spawn(context, instance, image_meta, [ 1244.350570] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1244.350570] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1244.350570] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1244.350570] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] self._fetch_image_if_missing(context, vi) [ 1244.350570] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1244.350570] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] image_fetch(context, vi, tmp_image_ds_loc) [ 1244.350570] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1244.350570] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] images.fetch_image( [ 1244.350570] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1244.351257] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] metadata = IMAGE_API.get(context, image_ref) [ 1244.351257] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1244.351257] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return session.show(context, image_id, [ 1244.351257] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1244.351257] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] _reraise_translated_image_exception(image_id) [ 1244.351257] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1244.351257] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] raise new_exc.with_traceback(exc_trace) [ 1244.351257] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1244.351257] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1244.351257] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1244.351257] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] result = getattr(controller, method)(*args, **kwargs) [ 1244.351257] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1244.351257] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self._get(image_id) [ 1244.351867] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1244.351867] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1244.351867] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1244.351867] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] resp, body = self.http_client.get(url, headers=header) [ 1244.351867] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1244.351867] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self.request(url, 'GET', **kwargs) [ 1244.351867] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1244.351867] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self._handle_response(resp) [ 1244.351867] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1244.351867] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] raise exc.from_response(resp, resp.content) [ 1244.351867] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] nova.exception.ImageNotAuthorized: Not authorized for image 8c308313-03d5-40b6-a5fe-9037e32dc76e. 
[ 1244.351867] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] [ 1244.352203] env[68040]: INFO nova.compute.manager [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Terminating instance [ 1244.352203] env[68040]: DEBUG oslo_concurrency.lockutils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.352203] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1244.352203] env[68040]: DEBUG nova.compute.manager [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1244.352203] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1244.352351] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-123d69f2-2c32-4308-8683-f9dfa895f499 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.355120] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a31192f9-3857-4efe-81b6-29c2cc9aaf45 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.363992] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1244.364218] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fff59fc3-4087-419e-afcc-189b54980d75 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.366452] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1244.366635] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-dd25868b-9055-44be-afa0-ea34e78e5dee 
tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1244.367569] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58de6703-1a97-4d29-ba72-68db1a36461f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.374306] env[68040]: DEBUG oslo_vmware.api [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Waiting for the task: (returnval){ [ 1244.374306] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52d63062-b2b9-78b8-8cde-4c4abcc959f3" [ 1244.374306] env[68040]: _type = "Task" [ 1244.374306] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.381196] env[68040]: DEBUG oslo_vmware.api [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52d63062-b2b9-78b8-8cde-4c4abcc959f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.429530] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1244.431275] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1244.431275] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Deleting the datastore file [datastore2] d1819f29-a891-47dd-a456-8f3b127daf6f {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1244.431275] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dcb7ec1b-81d3-43d5-9bbf-44a87c7d506b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.435952] env[68040]: DEBUG oslo_vmware.api [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Waiting for the task: (returnval){ [ 1244.435952] env[68040]: value = "task-3200260" [ 1244.435952] env[68040]: _type = "Task" [ 1244.435952] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.446830] env[68040]: DEBUG oslo_vmware.api [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Task: {'id': task-3200260, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.604030] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd98a033-7d0f-4169-813b-fee2ef01385e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.612020] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a37553-a6f3-41a5-9fc9-26f630fb87c2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.639802] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c80520d0-5a92-4b61-8407-a9e97f339f64 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.646769] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf983e57-424a-43de-9d2c-273c4e244581 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.659548] env[68040]: DEBUG nova.compute.provider_tree [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1244.668130] env[68040]: DEBUG nova.scheduler.client.report [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1244.682841] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.446s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1244.683341] env[68040]: ERROR nova.compute.manager [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1244.683341] env[68040]: Faults: ['InvalidArgument'] [ 
1244.683341] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Traceback (most recent call last): [ 1244.683341] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1244.683341] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] self.driver.spawn(context, instance, image_meta, [ 1244.683341] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1244.683341] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1244.683341] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1244.683341] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] self._fetch_image_if_missing(context, vi) [ 1244.683341] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1244.683341] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] image_cache(vi, tmp_image_ds_loc) [ 1244.683341] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1244.683724] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] vm_util.copy_virtual_disk( [ 1244.683724] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1244.683724] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] session._wait_for_task(vmdk_copy_task) [ 1244.683724] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1244.683724] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] return self.wait_for_task(task_ref) [ 1244.683724] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1244.683724] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] return evt.wait() [ 1244.683724] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1244.683724] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] result = hub.switch() [ 1244.683724] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1244.683724] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] return self.greenlet.switch() [ 1244.683724] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1244.683724] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] self.f(*self.args, **self.kw) [ 1244.684114] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1244.684114] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] raise exceptions.translate_fault(task_info.error) [ 1244.684114] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1244.684114] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Faults: ['InvalidArgument'] [ 1244.684114] env[68040]: ERROR nova.compute.manager [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] [ 1244.684114] env[68040]: DEBUG nova.compute.utils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1244.685487] env[68040]: DEBUG nova.compute.manager [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Build of instance 42f39352-e703-4ebf-9559-4c8b5abca70e was re-scheduled: A specified parameter was not correct: fileType [ 1244.685487] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1244.685878] env[68040]: DEBUG nova.compute.manager [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1244.686122] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Acquiring lock "refresh_cache-42f39352-e703-4ebf-9559-4c8b5abca70e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1244.686274] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Acquired lock "refresh_cache-42f39352-e703-4ebf-9559-4c8b5abca70e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.686441] env[68040]: DEBUG nova.network.neutron [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1244.712933] env[68040]: DEBUG nova.network.neutron [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] 
[instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1244.770801] env[68040]: DEBUG nova.network.neutron [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1244.779838] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Releasing lock "refresh_cache-42f39352-e703-4ebf-9559-4c8b5abca70e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1244.780068] env[68040]: DEBUG nova.compute.manager [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1244.780256] env[68040]: DEBUG nova.compute.manager [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Skipping network deallocation for instance since networking was not requested. {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1244.862496] env[68040]: INFO nova.scheduler.client.report [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Deleted allocations for instance 42f39352-e703-4ebf-9559-4c8b5abca70e [ 1244.881529] env[68040]: DEBUG oslo_concurrency.lockutils [None req-706ef279-5ca3-4368-8f2f-f7c53df057af tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Lock "42f39352-e703-4ebf-9559-4c8b5abca70e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 575.973s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1244.886965] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ff56fe6d-fde1-47e9-8aed-c0a208592e0a tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Lock "42f39352-e703-4ebf-9559-4c8b5abca70e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 373.450s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1244.887214] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ff56fe6d-fde1-47e9-8aed-c0a208592e0a tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Acquiring lock "42f39352-e703-4ebf-9559-4c8b5abca70e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1244.887427] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ff56fe6d-fde1-47e9-8aed-c0a208592e0a tempest-ServersAdmin275Test-2050891295 
tempest-ServersAdmin275Test-2050891295-project-member] Lock "42f39352-e703-4ebf-9559-4c8b5abca70e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1244.887617] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ff56fe6d-fde1-47e9-8aed-c0a208592e0a tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Lock "42f39352-e703-4ebf-9559-4c8b5abca70e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1244.889712] env[68040]: INFO nova.compute.manager [None req-ff56fe6d-fde1-47e9-8aed-c0a208592e0a tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Terminating instance [ 1244.891029] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1244.891269] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Creating directory with path [datastore2] vmware_temp/39a2b58f-3751-4b41-88ed-3232d5eee879/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1244.891790] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ff56fe6d-fde1-47e9-8aed-c0a208592e0a tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Acquiring lock "refresh_cache-42f39352-e703-4ebf-9559-4c8b5abca70e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1244.891977] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ff56fe6d-fde1-47e9-8aed-c0a208592e0a tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Acquired lock "refresh_cache-42f39352-e703-4ebf-9559-4c8b5abca70e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.892163] env[68040]: DEBUG nova.network.neutron [None req-ff56fe6d-fde1-47e9-8aed-c0a208592e0a tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1244.893171] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e16c52c-af1f-4567-8a26-54ae73219870 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.898532] env[68040]: DEBUG nova.compute.manager [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Starting instance... 
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1244.908935] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Created directory with path [datastore2] vmware_temp/39a2b58f-3751-4b41-88ed-3232d5eee879/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1244.908935] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Fetch image to [datastore2] vmware_temp/39a2b58f-3751-4b41-88ed-3232d5eee879/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1244.908935] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/39a2b58f-3751-4b41-88ed-3232d5eee879/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1244.909211] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ace5476-aa74-4826-8571-6df150113334 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.916208] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed98159-ceae-46d2-b923-e678e7fb0b0c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.919084] env[68040]: DEBUG nova.network.neutron [None req-ff56fe6d-fde1-47e9-8aed-c0a208592e0a tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1244.927633] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-958eea00-8096-4981-9460-0e8ac16febf4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.966175] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39ce995-f123-415d-958e-8f96577d618e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.976747] env[68040]: DEBUG oslo_vmware.api [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Task: {'id': task-3200260, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076296} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.978185] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1244.978381] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1244.978585] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1244.978753] env[68040]: INFO nova.compute.manager [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1244.980419] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d50875d4-afe1-41f4-904a-d0a28f8860d5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.982310] env[68040]: DEBUG nova.compute.claims [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1244.982489] env[68040]: DEBUG oslo_concurrency.lockutils [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1244.982695] env[68040]: DEBUG oslo_concurrency.lockutils [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1244.986636] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1245.007927] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 
tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1245.010462] env[68040]: DEBUG nova.network.neutron [None req-ff56fe6d-fde1-47e9-8aed-c0a208592e0a tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.021830] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ff56fe6d-fde1-47e9-8aed-c0a208592e0a tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Releasing lock "refresh_cache-42f39352-e703-4ebf-9559-4c8b5abca70e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1245.022192] env[68040]: DEBUG nova.compute.manager [None req-ff56fe6d-fde1-47e9-8aed-c0a208592e0a tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1245.022390] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ff56fe6d-fde1-47e9-8aed-c0a208592e0a tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1245.023074] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-396e7c05-81e1-48f5-8183-13ada1c727f5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.031487] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb4b9d8-59b9-4340-94d8-0bfc5b43c6fc {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.063749] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-ff56fe6d-fde1-47e9-8aed-c0a208592e0a tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 42f39352-e703-4ebf-9559-4c8b5abca70e could not be found. [ 1245.063962] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ff56fe6d-fde1-47e9-8aed-c0a208592e0a tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1245.064164] env[68040]: INFO nova.compute.manager [None req-ff56fe6d-fde1-47e9-8aed-c0a208592e0a tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Took 0.04 seconds to destroy the instance on the hypervisor. 
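The "Task: {...} progress is 0%" and "completed successfully" entries above come from oslo.vmware's task polling: wait_for_task repeatedly reads the vCenter task state, logs progress, and raises the translated fault if the task errors (the VimFaultException traceback earlier in this log is exactly that path). A rough sketch of such a polling loop follows, with a hypothetical get_task_info callback standing in for the real vSphere property reads; this is illustrative, not the oslo.vmware API.

    import time

    class VimFaultException(Exception):
        """Stand-in for oslo_vmware.exceptions.VimFaultException."""

    def wait_for_task(get_task_info, task_id, interval=0.5):
        # Poll until the task reaches a terminal state, logging progress
        # on the way, as in the "progress is 0%" lines above.
        while True:
            info = get_task_info(task_id)
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                # On error the fault is translated and raised; this is
                # where the "A specified parameter was not correct:
                # fileType" VimFaultException earlier in the log came from.
                raise VimFaultException(info.get("error", "task failed"))
            print("Task: {'id': %s} progress is %s%%."
                  % (task_id, info.get("progress", 0)))
            time.sleep(interval)

    # Demo: a fake task that completes on the third poll.
    _state = {"polls": 0}

    def fake_task_info(task_id):
        _state["polls"] += 1
        if _state["polls"] < 3:
            return {"state": "running", "progress": 0}
        return {"state": "success", "duration_secs": 0.076}

    print(wait_for_task(fake_task_info, "task-3200260", interval=0.01))

Under this scheme a fast operation shows one or two "0%" polls followed by a completion record carrying duration_secs, which matches the task-3200258 and task-3200260 DeleteDatastoreFile_Task entries above.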
[ 1245.064407] env[68040]: DEBUG oslo.service.loopingcall [None req-ff56fe6d-fde1-47e9-8aed-c0a208592e0a tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1245.068108] env[68040]: DEBUG nova.compute.manager [-] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1245.068213] env[68040]: DEBUG nova.network.neutron [-] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1245.083461] env[68040]: DEBUG oslo_vmware.rw_handles [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/39a2b58f-3751-4b41-88ed-3232d5eee879/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1245.086187] env[68040]: DEBUG nova.network.neutron [-] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1245.142771] env[68040]: DEBUG nova.network.neutron [-] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.146615] env[68040]: DEBUG oslo_vmware.rw_handles [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1245.146683] env[68040]: DEBUG oslo_vmware.rw_handles [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/39a2b58f-3751-4b41-88ed-3232d5eee879/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1245.151505] env[68040]: INFO nova.compute.manager [-] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] Took 0.08 seconds to deallocate network for instance. 
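The lockutils entries throughout this log (acquired :: waited Ns, "released" :: held Ns) come from a named-lock wrapper that times how long a caller waited to acquire the lock and how long it held it. Below is a simplified sketch of the pattern, assuming plain in-process threading locks; the real oslo_concurrency.lockutils also supports fair locks and file-based external locks.

    import threading
    import time
    from collections import defaultdict

    _locks = defaultdict(threading.Lock)

    def synchronized(name):
        """Serialize callers on a shared, named lock and log the timings."""
        def wrap(fn):
            def inner(*args, **kwargs):
                lock = _locks[name]
                t0 = time.monotonic()
                lock.acquire()
                waited = time.monotonic() - t0
                print('Lock "%s" acquired by "%s" :: waited %.3fs'
                      % (name, fn.__qualname__, waited))
                t1 = time.monotonic()
                try:
                    return fn(*args, **kwargs)
                finally:
                    lock.release()
                    held = time.monotonic() - t1
                    print('Lock "%s" "released" by "%s" :: held %.3fs'
                          % (name, fn.__qualname__, held))
            return inner
        return wrap

    @synchronized("compute_resources")
    def abort_instance_claim():
        time.sleep(0.01)  # stands in for resource-tracker bookkeeping

    abort_instance_claim()

Serializing on the instance UUID in this way is why the terminate request above waited 373.450s: it could not proceed until _locked_do_build_and_run_instance released the same per-instance lock after holding it for 575.973s.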
[ 1245.245686] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ff56fe6d-fde1-47e9-8aed-c0a208592e0a tempest-ServersAdmin275Test-2050891295 tempest-ServersAdmin275Test-2050891295-project-member] Lock "42f39352-e703-4ebf-9559-4c8b5abca70e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.359s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1245.246741] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "42f39352-e703-4ebf-9559-4c8b5abca70e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 34.059s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1245.246848] env[68040]: INFO nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 42f39352-e703-4ebf-9559-4c8b5abca70e] During sync_power_state the instance has a pending task (deleting). Skip. [ 1245.250008] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "42f39352-e703-4ebf-9559-4c8b5abca70e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1245.404301] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6936206-db87-4b82-a9de-ae82a2016e2b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.412454] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e45ff7-62ed-417f-8a56-cb1adc8ba218 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.442264] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b4269d8-d65f-420a-9313-8abf03fa6285 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.449169] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c864ad-c863-4f47-8f73-725c718ad6b5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.462170] env[68040]: DEBUG nova.compute.provider_tree [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1245.470768] env[68040]: DEBUG nova.scheduler.client.report [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1245.484077] env[68040]: DEBUG oslo_concurrency.lockutils [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.501s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1245.484800] env[68040]: ERROR nova.compute.manager [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 8c308313-03d5-40b6-a5fe-9037e32dc76e. [ 1245.484800] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Traceback (most recent call last): [ 1245.484800] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1245.484800] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1245.484800] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1245.484800] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] result = getattr(controller, method)(*args, **kwargs) [ 1245.484800] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1245.484800] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self._get(image_id) [ 1245.484800] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1245.484800] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1245.484800] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1245.485109] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] resp, body = self.http_client.get(url, headers=header) [ 1245.485109] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1245.485109] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self.request(url, 'GET', **kwargs) [ 1245.485109] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1245.485109] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self._handle_response(resp) [ 1245.485109] 
env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1245.485109] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] raise exc.from_response(resp, resp.content) [ 1245.485109] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1245.485109] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] [ 1245.485109] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] During handling of the above exception, another exception occurred: [ 1245.485109] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] [ 1245.485109] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Traceback (most recent call last): [ 1245.485627] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1245.485627] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] self.driver.spawn(context, instance, image_meta, [ 1245.485627] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1245.485627] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1245.485627] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1245.485627] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] self._fetch_image_if_missing(context, vi) [ 1245.485627] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1245.485627] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] image_fetch(context, vi, tmp_image_ds_loc) [ 1245.485627] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1245.485627] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] images.fetch_image( [ 1245.485627] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1245.485627] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] metadata = IMAGE_API.get(context, image_ref) [ 1245.485627] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1245.486159] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return session.show(context, image_id, [ 
1245.486159] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1245.486159] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] _reraise_translated_image_exception(image_id) [ 1245.486159] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1245.486159] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] raise new_exc.with_traceback(exc_trace) [ 1245.486159] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1245.486159] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1245.486159] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1245.486159] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] result = getattr(controller, method)(*args, **kwargs) [ 1245.486159] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1245.486159] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self._get(image_id) [ 1245.486159] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1245.486159] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1245.486611] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1245.486611] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] resp, body = self.http_client.get(url, headers=header) [ 1245.486611] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1245.486611] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self.request(url, 'GET', **kwargs) [ 1245.486611] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1245.486611] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self._handle_response(resp) [ 1245.486611] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1245.486611] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] raise exc.from_response(resp, resp.content) [ 1245.486611] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] 
nova.exception.ImageNotAuthorized: Not authorized for image 8c308313-03d5-40b6-a5fe-9037e32dc76e. [ 1245.486611] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] [ 1245.486877] env[68040]: DEBUG nova.compute.utils [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Not authorized for image 8c308313-03d5-40b6-a5fe-9037e32dc76e. {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1245.486877] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.501s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1245.488181] env[68040]: INFO nova.compute.claims [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1245.490797] env[68040]: DEBUG nova.compute.manager [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Build of instance d1819f29-a891-47dd-a456-8f3b127daf6f was re-scheduled: Not authorized for image 8c308313-03d5-40b6-a5fe-9037e32dc76e. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1245.491303] env[68040]: DEBUG nova.compute.manager [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1245.491479] env[68040]: DEBUG nova.compute.manager [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1245.491639] env[68040]: DEBUG nova.compute.manager [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1245.491801] env[68040]: DEBUG nova.network.neutron [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1245.660559] env[68040]: DEBUG neutronclient.v2_0.client [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68040) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1245.662780] env[68040]: ERROR nova.compute.manager [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1245.662780] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Traceback (most recent call last): [ 1245.662780] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1245.662780] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1245.662780] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1245.662780] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] result = getattr(controller, method)(*args, **kwargs) [ 1245.662780] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1245.662780] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self._get(image_id) [ 1245.662780] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1245.662780] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1245.662780] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1245.663177] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] resp, body = self.http_client.get(url, headers=header) [ 1245.663177] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] 
File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1245.663177] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self.request(url, 'GET', **kwargs) [ 1245.663177] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1245.663177] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self._handle_response(resp) [ 1245.663177] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1245.663177] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] raise exc.from_response(resp, resp.content) [ 1245.663177] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1245.663177] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] [ 1245.663177] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] During handling of the above exception, another exception occurred: [ 1245.663177] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] [ 1245.663177] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Traceback (most recent call last): [ 1245.663531] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1245.663531] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] self.driver.spawn(context, instance, image_meta, [ 1245.663531] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1245.663531] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1245.663531] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1245.663531] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] self._fetch_image_if_missing(context, vi) [ 1245.663531] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1245.663531] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] image_fetch(context, vi, tmp_image_ds_loc) [ 1245.663531] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1245.663531] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] images.fetch_image( [ 1245.663531] env[68040]: 
ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1245.663531] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] metadata = IMAGE_API.get(context, image_ref) [ 1245.663531] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1245.663900] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return session.show(context, image_id, [ 1245.663900] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1245.663900] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] _reraise_translated_image_exception(image_id) [ 1245.663900] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1245.663900] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] raise new_exc.with_traceback(exc_trace) [ 1245.663900] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1245.663900] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1245.663900] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1245.663900] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] result = getattr(controller, method)(*args, **kwargs) [ 1245.663900] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1245.663900] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self._get(image_id) [ 1245.663900] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1245.663900] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1245.664284] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1245.664284] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] resp, body = self.http_client.get(url, headers=header) [ 1245.664284] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1245.664284] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self.request(url, 'GET', **kwargs) [ 1245.664284] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1245.664284] 
env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self._handle_response(resp) [ 1245.664284] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1245.664284] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] raise exc.from_response(resp, resp.content) [ 1245.664284] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] nova.exception.ImageNotAuthorized: Not authorized for image 8c308313-03d5-40b6-a5fe-9037e32dc76e. [ 1245.664284] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] [ 1245.664284] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] During handling of the above exception, another exception occurred: [ 1245.664284] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] [ 1245.664284] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Traceback (most recent call last): [ 1245.664658] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1245.664658] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] self._build_and_run_instance(context, instance, image, [ 1245.664658] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 1245.664658] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] raise exception.RescheduledException( [ 1245.664658] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] nova.exception.RescheduledException: Build of instance d1819f29-a891-47dd-a456-8f3b127daf6f was re-scheduled: Not authorized for image 8c308313-03d5-40b6-a5fe-9037e32dc76e. 
[ 1245.664658] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] [ 1245.664658] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] During handling of the above exception, another exception occurred: [ 1245.664658] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] [ 1245.664658] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Traceback (most recent call last): [ 1245.664658] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1245.664658] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] ret = obj(*args, **kwargs) [ 1245.664658] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1245.664658] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] exception_handler_v20(status_code, error_body) [ 1245.665019] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1245.665019] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] raise client_exc(message=error_message, [ 1245.665019] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1245.665019] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Neutron server returns request_ids: ['req-e3ae4bf2-f137-4ab6-86f8-742bcb1cf37b'] [ 1245.665019] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] [ 1245.665019] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] During handling of the above exception, another exception occurred: [ 1245.665019] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] [ 1245.665019] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Traceback (most recent call last): [ 1245.665019] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1245.665019] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] self._deallocate_network(context, instance, requested_networks) [ 1245.665019] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1245.665019] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] self.network_api.deallocate_for_instance( [ 1245.665019] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1245.667216] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] data = neutron.list_ports(**search_opts) [ 
1245.667216] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1245.667216] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] ret = obj(*args, **kwargs) [ 1245.667216] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1245.667216] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self.list('ports', self.ports_path, retrieve_all, [ 1245.667216] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1245.667216] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] ret = obj(*args, **kwargs) [ 1245.667216] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1245.667216] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] for r in self._pagination(collection, path, **params): [ 1245.667216] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1245.667216] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] res = self.get(path, params=params) [ 1245.667216] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1245.667216] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] ret = obj(*args, **kwargs) [ 1245.667552] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1245.667552] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self.retry_request("GET", action, body=body, [ 1245.667552] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1245.667552] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] ret = obj(*args, **kwargs) [ 1245.667552] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1245.667552] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self.do_request(method, action, body=body, [ 1245.667552] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1245.667552] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] ret = obj(*args, **kwargs) [ 1245.667552] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 
1245.667552] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] self._handle_fault_response(status_code, replybody, resp) [ 1245.667552] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1245.667552] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] raise exception.Unauthorized() [ 1245.667552] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] nova.exception.Unauthorized: Not authorized. [ 1245.667920] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] [ 1245.752615] env[68040]: INFO nova.scheduler.client.report [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Deleted allocations for instance d1819f29-a891-47dd-a456-8f3b127daf6f [ 1245.778653] env[68040]: DEBUG oslo_concurrency.lockutils [None req-771b25ab-9a7a-42ea-b691-f2971c024f2f tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Lock "d1819f29-a891-47dd-a456-8f3b127daf6f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 576.784s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1245.779885] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f55598b6-2567-4a59-8df0-9afc2b7284d5 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Lock "d1819f29-a891-47dd-a456-8f3b127daf6f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 373.184s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1245.780174] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f55598b6-2567-4a59-8df0-9afc2b7284d5 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Acquiring lock "d1819f29-a891-47dd-a456-8f3b127daf6f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1245.780326] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f55598b6-2567-4a59-8df0-9afc2b7284d5 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Lock "d1819f29-a891-47dd-a456-8f3b127daf6f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1245.780496] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f55598b6-2567-4a59-8df0-9afc2b7284d5 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Lock "d1819f29-a891-47dd-a456-8f3b127daf6f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1245.782803] env[68040]: INFO nova.compute.manager [None req-f55598b6-2567-4a59-8df0-9afc2b7284d5 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member]
[instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Terminating instance [ 1245.784462] env[68040]: DEBUG nova.compute.manager [None req-f55598b6-2567-4a59-8df0-9afc2b7284d5 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1245.784668] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-f55598b6-2567-4a59-8df0-9afc2b7284d5 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1245.785295] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b0a71c08-a496-446b-b0db-f2a9aeafb3cc {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.793701] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9d7317-bf1c-4eb9-9741-4d57f3f2924b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.806881] env[68040]: DEBUG nova.compute.manager [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1245.826837] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-f55598b6-2567-4a59-8df0-9afc2b7284d5 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d1819f29-a891-47dd-a456-8f3b127daf6f could not be found. [ 1245.827085] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-f55598b6-2567-4a59-8df0-9afc2b7284d5 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1245.827256] env[68040]: INFO nova.compute.manager [None req-f55598b6-2567-4a59-8df0-9afc2b7284d5 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1245.827511] env[68040]: DEBUG oslo.service.loopingcall [None req-f55598b6-2567-4a59-8df0-9afc2b7284d5 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1245.830383] env[68040]: DEBUG nova.compute.manager [-] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1245.830383] env[68040]: DEBUG nova.network.neutron [-] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1245.865446] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1245.990667] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e726621f-b856-41c4-a42e-bb535344aaec {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.001183] env[68040]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68040) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1246.001183] env[68040]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1246.001183] env[68040]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1246.001183] env[68040]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1246.001183] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1246.001183] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1246.001183] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1246.001183] env[68040]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1246.001183] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1246.001183] env[68040]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1246.001183] env[68040]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1246.001680] env[68040]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-cd0c41f3-7d45-41a3-88ca-92f91181fb74'] [ 1246.001680] env[68040]: ERROR oslo.service.loopingcall [ 1246.001680] env[68040]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1246.001680] env[68040]: ERROR oslo.service.loopingcall [ 1246.001680] env[68040]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1246.001680] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1246.001680] env[68040]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1246.001680] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1246.001680] env[68040]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1246.001680] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1246.001680] env[68040]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1246.001680] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1246.001680] env[68040]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1246.001680] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1246.001680] env[68040]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1246.001680] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1246.001680] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1246.001680] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1246.001680] env[68040]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1246.002401] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1246.002401] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1246.002401] env[68040]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1246.002401] env[68040]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1246.002401] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1246.002401] env[68040]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1246.002401] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1246.002401] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1246.002401] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1246.002401] env[68040]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1246.002401] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1246.002401] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1246.002401] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1246.002401] env[68040]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1246.002401] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1246.002401] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1246.002401] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1246.002401] env[68040]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1246.003125] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1246.003125] env[68040]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1246.003125] env[68040]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1246.003125] env[68040]: ERROR oslo.service.loopingcall [ 1246.003125] env[68040]: ERROR nova.compute.manager [None req-f55598b6-2567-4a59-8df0-9afc2b7284d5 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
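The failure chain in the traceback above is worth spelling out: the raw neutronclient Unauthorized (401) is intercepted by the wrapper in nova/network/neutron.py, and because the failing client was built from the [neutron] admin credentials in nova.conf rather than the user's token, the wrapper raises NeutronAdminCredentialConfigurationInvalid instead of the plain Unauthorized seen earlier for req-771b25ab. A rough sketch of that translation pattern follows, with stand-in exception classes rather than Nova's real ones.

```python
# Sketch of the 401-translation pattern walked through in the tracebacks
# above. The class names mirror Nova's, but this is an illustration, not
# the real decorator in nova/network/neutron.py.
import functools

class Unauthorized(Exception):
    """The caller's own token was rejected; surfaced to the user."""

class NeutronAdminCredentialConfigurationInvalid(Exception):
    """The admin credentials in nova.conf were rejected; operator error."""

class NeutronClientUnauthorized(Exception):
    """Stand-in for neutronclient.common.exceptions.Unauthorized."""

def translate_neutron_401(uses_admin_token):
    """Wrap a neutron call and map a 401 to the matching Nova exception."""
    def decorator(fn):
        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            try:
                return fn(*args, **kwargs)
            except NeutronClientUnauthorized:
                if uses_admin_token:
                    # Retrying cannot help: the service config itself is bad,
                    # which is why the looping call above fails permanently.
                    raise NeutronAdminCredentialConfigurationInvalid()
                raise Unauthorized()
        return wrapper
    return decorator
```

Distinguishing the two cases matters operationally: a user 401 fails one request, while an admin 401 poisons every deallocation retry, which is how the instance ends up with vm_state ERROR in the records that follow.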
[ 1246.006801] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3bb43b-05e1-4a60-8773-8dfe4313134a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.037504] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed0378e-4371-4d9c-9d83-cfb4162024b3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.041679] env[68040]: ERROR nova.compute.manager [None req-f55598b6-2567-4a59-8df0-9afc2b7284d5 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1246.041679] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Traceback (most recent call last): [ 1246.041679] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1246.041679] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] ret = obj(*args, **kwargs) [ 1246.041679] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1246.041679] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] exception_handler_v20(status_code, error_body) [ 1246.041679] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1246.041679] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] raise client_exc(message=error_message, [ 1246.041679] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1246.041679] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Neutron server returns request_ids: ['req-cd0c41f3-7d45-41a3-88ca-92f91181fb74'] [ 1246.041679] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] [ 1246.042219] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] During handling of the above exception, another exception occurred: [ 1246.042219] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] [ 1246.042219] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Traceback (most recent call last): [ 1246.042219] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1246.042219] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] self._delete_instance(context, instance, bdms) [ 1246.042219] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1246.042219] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] self._shutdown_instance(context, instance, bdms) [ 1246.042219] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1246.042219] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] self._try_deallocate_network(context, instance, requested_networks) [ 1246.042219] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1246.042219] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] with excutils.save_and_reraise_exception(): [ 1246.042219] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1246.042219] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] self.force_reraise() [ 1246.042650] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1246.042650] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] raise self.value [ 1246.042650] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1246.042650] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] _deallocate_network_with_retries() [ 1246.042650] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1246.042650] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return evt.wait() [ 1246.042650] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1246.042650] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] result = hub.switch() [ 1246.042650] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1246.042650] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self.greenlet.switch() [ 1246.042650] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1246.042650] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] result = func(*self.args, **self.kw) [ 1246.043154] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1246.043154] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] 
result = f(*args, **kwargs) [ 1246.043154] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1246.043154] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] self._deallocate_network( [ 1246.043154] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1246.043154] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] self.network_api.deallocate_for_instance( [ 1246.043154] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1246.043154] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] data = neutron.list_ports(**search_opts) [ 1246.043154] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1246.043154] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] ret = obj(*args, **kwargs) [ 1246.043154] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1246.043154] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self.list('ports', self.ports_path, retrieve_all, [ 1246.043154] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1246.043574] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] ret = obj(*args, **kwargs) [ 1246.043574] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1246.043574] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] for r in self._pagination(collection, path, **params): [ 1246.043574] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1246.043574] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] res = self.get(path, params=params) [ 1246.043574] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1246.043574] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] ret = obj(*args, **kwargs) [ 1246.043574] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1246.043574] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self.retry_request("GET", action, body=body, [ 1246.043574] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 
1246.043574] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] ret = obj(*args, **kwargs) [ 1246.043574] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1246.043574] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] return self.do_request(method, action, body=body, [ 1246.043992] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1246.043992] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] ret = obj(*args, **kwargs) [ 1246.043992] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1246.043992] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] self._handle_fault_response(status_code, replybody, resp) [ 1246.043992] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1246.043992] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1246.043992] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1246.043992] env[68040]: ERROR nova.compute.manager [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] [ 1246.052811] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36aa0a70-1343-4336-9374-137f431c364b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.066909] env[68040]: DEBUG nova.compute.provider_tree [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1246.072371] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f55598b6-2567-4a59-8df0-9afc2b7284d5 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Lock "d1819f29-a891-47dd-a456-8f3b127daf6f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.292s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1246.073492] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "d1819f29-a891-47dd-a456-8f3b127daf6f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 34.886s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1246.073735] env[68040]: INFO nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] During sync_power_state 
the instance has a pending task (deleting). Skip. [ 1246.073962] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "d1819f29-a891-47dd-a456-8f3b127daf6f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1246.075763] env[68040]: DEBUG nova.scheduler.client.report [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1246.088769] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.602s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1246.089258] env[68040]: DEBUG nova.compute.manager [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1246.091632] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.226s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1246.093018] env[68040]: INFO nova.compute.claims [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1246.137013] env[68040]: DEBUG nova.compute.utils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1246.141209] env[68040]: DEBUG nova.compute.manager [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Allocating IP information in the background. 
{{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1246.141428] env[68040]: DEBUG nova.network.neutron [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1246.146998] env[68040]: DEBUG nova.compute.manager [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1246.151553] env[68040]: INFO nova.compute.manager [None req-f55598b6-2567-4a59-8df0-9afc2b7284d5 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] [instance: d1819f29-a891-47dd-a456-8f3b127daf6f] Successfully reverted task state from None on failure for instance. [ 1246.157437] env[68040]: ERROR oslo_messaging.rpc.server [None req-f55598b6-2567-4a59-8df0-9afc2b7284d5 tempest-ListImageFiltersTestJSON-22395781 tempest-ListImageFiltersTestJSON-22395781-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1246.157437] env[68040]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1246.157437] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1246.157437] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1246.157437] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1246.157437] env[68040]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1246.157437] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1246.157437] env[68040]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1246.157437] env[68040]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1246.157437] env[68040]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-cd0c41f3-7d45-41a3-88ca-92f91181fb74'] [ 1246.157437] env[68040]: ERROR oslo_messaging.rpc.server [ 1246.157437] env[68040]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1246.157437] env[68040]: ERROR oslo_messaging.rpc.server [ 1246.157437] env[68040]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1246.157437] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1246.158015] env[68040]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1246.158015] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 
1246.158015] env[68040]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1246.158015] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1246.158015] env[68040]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1246.158015] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1246.158015] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1246.158015] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1246.158015] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1246.158015] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1246.158015] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1246.158015] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1246.158015] env[68040]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1246.158015] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1246.158015] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1246.158015] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1246.158015] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1246.158015] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1246.158508] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1246.158508] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1246.158508] env[68040]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1246.158508] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1246.158508] env[68040]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1246.158508] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1246.158508] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1246.158508] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1246.158508] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1246.158508] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1246.158508] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1246.158508] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1246.158508] env[68040]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1246.158508] env[68040]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1246.158508] env[68040]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1246.158508] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1246.158508] env[68040]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1246.158508] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1246.159085] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1246.159085] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1246.159085] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1246.159085] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1246.159085] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1246.159085] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1246.159085] env[68040]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1246.159085] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1246.159085] env[68040]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1246.159085] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1246.159085] env[68040]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1246.159085] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1246.159085] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1246.159085] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1246.159085] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1246.159085] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1246.159085] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1246.159085] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1246.159617] env[68040]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1246.159617] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1246.159617] env[68040]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1246.159617] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1246.159617] env[68040]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1246.159617] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
1246.159617] env[68040]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1246.159617] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1246.159617] env[68040]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1246.159617] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1246.159617] env[68040]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1246.159617] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1246.159617] env[68040]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1246.159617] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1246.159617] env[68040]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1246.159617] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1246.159617] env[68040]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1246.159617] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1246.160247] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1246.160247] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1246.160247] env[68040]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1246.160247] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1246.160247] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1246.160247] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1246.160247] env[68040]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1246.160247] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1246.160247] env[68040]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1246.160247] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1246.160247] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1246.160247] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1246.160247] env[68040]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1246.160247] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1246.160247] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1246.160247] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1246.160247] env[68040]: ERROR oslo_messaging.rpc.server return self.do_request(method, 
action, body=body, [ 1246.160247] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1246.160776] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1246.160776] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1246.160776] env[68040]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1246.160776] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1246.160776] env[68040]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1246.160776] env[68040]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1246.160776] env[68040]: ERROR oslo_messaging.rpc.server [ 1246.222673] env[68040]: DEBUG nova.compute.manager [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Start spawning the instance on the hypervisor. {{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1246.226435] env[68040]: DEBUG nova.policy [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '083a37c9e8d34f868e5f29614dccba8d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '354d55d28e0e4ac4a68f2995344469be', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 1246.252060] env[68040]: DEBUG nova.virt.hardware [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1246.252060] env[68040]: DEBUG nova.virt.hardware [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:348}} [ 1246.252060] env[68040]: DEBUG nova.virt.hardware [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1246.252325] env[68040]: DEBUG nova.virt.hardware [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1246.252397] env[68040]: DEBUG nova.virt.hardware [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1246.252551] env[68040]: DEBUG nova.virt.hardware [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1246.252766] env[68040]: DEBUG nova.virt.hardware [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1246.252926] env[68040]: DEBUG nova.virt.hardware [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1246.253111] env[68040]: DEBUG nova.virt.hardware [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1246.253281] env[68040]: DEBUG nova.virt.hardware [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1246.253455] env[68040]: DEBUG nova.virt.hardware [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1246.254342] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159bf301-b5a1-4004-a57d-b80726dfd511 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.266940] env[68040]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe5fac25-5fc1-46c9-9ec8-a658f20302ef {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.445252] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a50edf5-a26c-4344-849c-b32426626742 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.452579] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb56315-1196-4868-b7ae-a0886ea8862e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.481793] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d232f2ee-04ac-487b-bbc4-119a031faad1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.488911] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4663a9f7-b2c9-4fa9-9c68-97a944eb6428 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.504010] env[68040]: DEBUG nova.compute.provider_tree [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1246.516690] env[68040]: DEBUG nova.scheduler.client.report [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1246.533876] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.442s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1246.534392] env[68040]: DEBUG nova.compute.manager [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Start building networks asynchronously for instance. 
{{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1246.575761] env[68040]: DEBUG nova.compute.utils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1246.577519] env[68040]: DEBUG nova.compute.manager [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1246.577775] env[68040]: DEBUG nova.network.neutron [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1246.595536] env[68040]: DEBUG nova.compute.manager [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1246.610568] env[68040]: DEBUG nova.network.neutron [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Successfully created port: b630aa45-0a74-41ad-af18-f44b96ff03fe {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1246.674053] env[68040]: DEBUG nova.compute.manager [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Start spawning the instance on the hypervisor. 
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1246.708619] env[68040]: DEBUG nova.virt.hardware [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1246.709039] env[68040]: DEBUG nova.virt.hardware [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1246.709223] env[68040]: DEBUG nova.virt.hardware [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1246.709511] env[68040]: DEBUG nova.virt.hardware [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1246.709589] env[68040]: DEBUG nova.virt.hardware [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1246.709709] env[68040]: DEBUG nova.virt.hardware [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1246.709924] env[68040]: DEBUG nova.virt.hardware [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1246.710315] env[68040]: DEBUG nova.virt.hardware [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1246.710315] env[68040]: DEBUG 
nova.virt.hardware [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1246.710425] env[68040]: DEBUG nova.virt.hardware [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1246.710657] env[68040]: DEBUG nova.virt.hardware [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1246.711861] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c560dba-5066-41b8-a182-d1844524c951 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.721590] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2711ceb9-9f36-4bdd-ae5f-52110aa8cf2c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.885262] env[68040]: DEBUG nova.policy [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9f54d98e83e843f881225ab50c6bfa89', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6d8c0d1ea9f34b4b82469cb512fe4696', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 1247.263190] env[68040]: DEBUG nova.network.neutron [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Successfully updated port: b630aa45-0a74-41ad-af18-f44b96ff03fe {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1247.279633] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Acquiring lock "refresh_cache-57cd94c2-aec3-427e-9b9f-a444fe291974" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1247.279802] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Acquired lock "refresh_cache-57cd94c2-aec3-427e-9b9f-a444fe291974" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.279956] env[68040]: DEBUG nova.network.neutron [None 
req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1247.324039] env[68040]: DEBUG nova.network.neutron [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1247.351376] env[68040]: DEBUG nova.compute.manager [req-6dba3cde-2868-4418-9dc2-877081e76963 req-45debf27-ca19-4b56-b516-f6277a73d2f9 service nova] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Received event network-vif-plugged-b630aa45-0a74-41ad-af18-f44b96ff03fe {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1247.351376] env[68040]: DEBUG oslo_concurrency.lockutils [req-6dba3cde-2868-4418-9dc2-877081e76963 req-45debf27-ca19-4b56-b516-f6277a73d2f9 service nova] Acquiring lock "57cd94c2-aec3-427e-9b9f-a444fe291974-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1247.351376] env[68040]: DEBUG oslo_concurrency.lockutils [req-6dba3cde-2868-4418-9dc2-877081e76963 req-45debf27-ca19-4b56-b516-f6277a73d2f9 service nova] Lock "57cd94c2-aec3-427e-9b9f-a444fe291974-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1247.351376] env[68040]: DEBUG oslo_concurrency.lockutils [req-6dba3cde-2868-4418-9dc2-877081e76963 req-45debf27-ca19-4b56-b516-f6277a73d2f9 service nova] Lock "57cd94c2-aec3-427e-9b9f-a444fe291974-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1247.351645] env[68040]: DEBUG nova.compute.manager [req-6dba3cde-2868-4418-9dc2-877081e76963 req-45debf27-ca19-4b56-b516-f6277a73d2f9 service nova] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] No waiting events found dispatching network-vif-plugged-b630aa45-0a74-41ad-af18-f44b96ff03fe {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1247.351645] env[68040]: WARNING nova.compute.manager [req-6dba3cde-2868-4418-9dc2-877081e76963 req-45debf27-ca19-4b56-b516-f6277a73d2f9 service nova] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Received unexpected event network-vif-plugged-b630aa45-0a74-41ad-af18-f44b96ff03fe for instance with vm_state building and task_state spawning. 
[ 1247.514545] env[68040]: DEBUG nova.network.neutron [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Successfully created port: af033d89-b977-4235-bb0c-e6278290dda2 {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1247.556018] env[68040]: DEBUG nova.network.neutron [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Updating instance_info_cache with network_info: [{"id": "b630aa45-0a74-41ad-af18-f44b96ff03fe", "address": "fa:16:3e:7f:c4:10", "network": {"id": "ca1a84af-ab33-497c-8767-fd4463c076be", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.128", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0770d674a39c40089de0aade9440b370", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb630aa45-0a", "ovs_interfaceid": "b630aa45-0a74-41ad-af18-f44b96ff03fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.564972] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Releasing lock "refresh_cache-57cd94c2-aec3-427e-9b9f-a444fe291974" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1247.565345] env[68040]: DEBUG nova.compute.manager [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Instance network_info: |[{"id": "b630aa45-0a74-41ad-af18-f44b96ff03fe", "address": "fa:16:3e:7f:c4:10", "network": {"id": "ca1a84af-ab33-497c-8767-fd4463c076be", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.128", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0770d674a39c40089de0aade9440b370", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb630aa45-0a", 
"ovs_interfaceid": "b630aa45-0a74-41ad-af18-f44b96ff03fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1247.568612] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:c4:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b630aa45-0a74-41ad-af18-f44b96ff03fe', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1247.578172] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Creating folder: Project (354d55d28e0e4ac4a68f2995344469be). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1247.578843] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ae03ce4-3b10-4a37-9f61-7335d7b9608e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.589681] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Created folder: Project (354d55d28e0e4ac4a68f2995344469be) in parent group-v639956. [ 1247.589893] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Creating folder: Instances. Parent ref: group-v640026. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1247.590143] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff5e774f-09ca-411b-8b03-983998dbb840 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.599054] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Created folder: Instances in parent group-v640026. [ 1247.599308] env[68040]: DEBUG oslo.service.loopingcall [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1247.599498] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1247.599697] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8432a6ba-9846-4d42-a35d-2b927ca7222d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.618494] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1247.618494] env[68040]: value = "task-3200263" [ 1247.618494] env[68040]: _type = "Task" [ 1247.618494] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.625551] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200263, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.127832] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200263, 'name': CreateVM_Task, 'duration_secs': 0.272384} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.128011] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1248.128719] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1248.128911] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.129214] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1248.129453] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d7c0154-401d-4c7a-bf2c-eadfb36614ea {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.134244] env[68040]: DEBUG oslo_vmware.api [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Waiting for the task: (returnval){ [ 1248.134244] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]528375cd-9b42-fb0b-0f21-79dbdb5ad839" [ 1248.134244] env[68040]: _type = "Task" [ 1248.134244] 
env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.142948] env[68040]: DEBUG oslo_vmware.api [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]528375cd-9b42-fb0b-0f21-79dbdb5ad839, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.416067] env[68040]: DEBUG nova.compute.manager [req-68ea37c2-d3f3-4777-9f2d-ac3ad60fb582 req-591e27cf-562d-49fd-b697-37b4cb407cf2 service nova] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Received event network-vif-plugged-af033d89-b977-4235-bb0c-e6278290dda2 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1248.416511] env[68040]: DEBUG oslo_concurrency.lockutils [req-68ea37c2-d3f3-4777-9f2d-ac3ad60fb582 req-591e27cf-562d-49fd-b697-37b4cb407cf2 service nova] Acquiring lock "87a7851e-d6fe-481a-8abb-5732e281cb64-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1248.416700] env[68040]: DEBUG oslo_concurrency.lockutils [req-68ea37c2-d3f3-4777-9f2d-ac3ad60fb582 req-591e27cf-562d-49fd-b697-37b4cb407cf2 service nova] Lock "87a7851e-d6fe-481a-8abb-5732e281cb64-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1248.416948] env[68040]: DEBUG oslo_concurrency.lockutils [req-68ea37c2-d3f3-4777-9f2d-ac3ad60fb582 req-591e27cf-562d-49fd-b697-37b4cb407cf2 service nova] Lock "87a7851e-d6fe-481a-8abb-5732e281cb64-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1248.418035] env[68040]: DEBUG nova.compute.manager [req-68ea37c2-d3f3-4777-9f2d-ac3ad60fb582 req-591e27cf-562d-49fd-b697-37b4cb407cf2 service nova] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] No waiting events found dispatching network-vif-plugged-af033d89-b977-4235-bb0c-e6278290dda2 {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1248.418035] env[68040]: WARNING nova.compute.manager [req-68ea37c2-d3f3-4777-9f2d-ac3ad60fb582 req-591e27cf-562d-49fd-b697-37b4cb407cf2 service nova] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Received unexpected event network-vif-plugged-af033d89-b977-4235-bb0c-e6278290dda2 for instance with vm_state building and task_state spawning. 
[ 1248.509863] env[68040]: DEBUG nova.network.neutron [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Successfully updated port: af033d89-b977-4235-bb0c-e6278290dda2 {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1248.521819] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Acquiring lock "refresh_cache-87a7851e-d6fe-481a-8abb-5732e281cb64" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1248.521979] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Acquired lock "refresh_cache-87a7851e-d6fe-481a-8abb-5732e281cb64" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.522153] env[68040]: DEBUG nova.network.neutron [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1248.587202] env[68040]: DEBUG nova.network.neutron [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Instance cache missing network info. 
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1248.644757] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1248.645042] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1248.645271] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1248.793963] env[68040]: DEBUG nova.network.neutron [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Updating instance_info_cache with network_info: [{"id": "af033d89-b977-4235-bb0c-e6278290dda2", "address": "fa:16:3e:6a:7d:e0", "network": {"id": "d3c3bbf4-ff6a-4caa-9ff8-f634f43ea313", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-426900683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d8c0d1ea9f34b4b82469cb512fe4696", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf033d89-b9", "ovs_interfaceid": "af033d89-b977-4235-bb0c-e6278290dda2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1248.806482] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Releasing lock "refresh_cache-87a7851e-d6fe-481a-8abb-5732e281cb64" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1248.806794] env[68040]: DEBUG nova.compute.manager [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 
87a7851e-d6fe-481a-8abb-5732e281cb64] Instance network_info: |[{"id": "af033d89-b977-4235-bb0c-e6278290dda2", "address": "fa:16:3e:6a:7d:e0", "network": {"id": "d3c3bbf4-ff6a-4caa-9ff8-f634f43ea313", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-426900683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d8c0d1ea9f34b4b82469cb512fe4696", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf033d89-b9", "ovs_interfaceid": "af033d89-b977-4235-bb0c-e6278290dda2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1248.807229] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:7d:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a91c3a96-63d0-407c-bcde-c3d5b58d9cb2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af033d89-b977-4235-bb0c-e6278290dda2', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1248.816399] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Creating folder: Project (6d8c0d1ea9f34b4b82469cb512fe4696). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1248.816952] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-138e20a1-1ba6-4150-a115-75c5c75fdf6c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.828192] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Created folder: Project (6d8c0d1ea9f34b4b82469cb512fe4696) in parent group-v639956. [ 1248.828378] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Creating folder: Instances. Parent ref: group-v640029. 
{{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1248.828687] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e3614319-3805-4904-a726-f54176ae15ec {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.836872] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Created folder: Instances in parent group-v640029. [ 1248.837117] env[68040]: DEBUG oslo.service.loopingcall [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1248.837328] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1248.837532] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aea5bb7d-9cad-4ec3-a05a-d5da362154cd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.857049] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1248.857049] env[68040]: value = "task-3200266" [ 1248.857049] env[68040]: _type = "Task" [ 1248.857049] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.864535] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200266, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.367807] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200266, 'name': CreateVM_Task, 'duration_secs': 0.377808} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.367991] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1249.368768] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1249.368866] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.369183] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1249.369448] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65eec1c2-fdfa-4bd3-b386-8f46a1eb0bcd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.374315] env[68040]: DEBUG oslo_vmware.api [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Waiting for the task: (returnval){ [ 1249.374315] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5210947d-8120-ebab-b757-c2ac8eb633cc" [ 1249.374315] env[68040]: _type = "Task" [ 1249.374315] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.379681] env[68040]: DEBUG nova.compute.manager [req-9267b08d-97e8-4a62-8f25-77aa36a8237a req-025ece83-cd96-46b7-86b9-8fa974f56056 service nova] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Received event network-changed-b630aa45-0a74-41ad-af18-f44b96ff03fe {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1249.379946] env[68040]: DEBUG nova.compute.manager [req-9267b08d-97e8-4a62-8f25-77aa36a8237a req-025ece83-cd96-46b7-86b9-8fa974f56056 service nova] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Refreshing instance network info cache due to event network-changed-b630aa45-0a74-41ad-af18-f44b96ff03fe. 
{{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1249.380199] env[68040]: DEBUG oslo_concurrency.lockutils [req-9267b08d-97e8-4a62-8f25-77aa36a8237a req-025ece83-cd96-46b7-86b9-8fa974f56056 service nova] Acquiring lock "refresh_cache-57cd94c2-aec3-427e-9b9f-a444fe291974" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1249.380355] env[68040]: DEBUG oslo_concurrency.lockutils [req-9267b08d-97e8-4a62-8f25-77aa36a8237a req-025ece83-cd96-46b7-86b9-8fa974f56056 service nova] Acquired lock "refresh_cache-57cd94c2-aec3-427e-9b9f-a444fe291974" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.380525] env[68040]: DEBUG nova.network.neutron [req-9267b08d-97e8-4a62-8f25-77aa36a8237a req-025ece83-cd96-46b7-86b9-8fa974f56056 service nova] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Refreshing network info cache for port b630aa45-0a74-41ad-af18-f44b96ff03fe {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1249.384953] env[68040]: DEBUG oslo_vmware.api [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5210947d-8120-ebab-b757-c2ac8eb633cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.884873] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1249.885230] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1249.885388] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1250.013332] env[68040]: DEBUG nova.network.neutron [req-9267b08d-97e8-4a62-8f25-77aa36a8237a req-025ece83-cd96-46b7-86b9-8fa974f56056 service nova] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Updated VIF entry in instance network info cache for port b630aa45-0a74-41ad-af18-f44b96ff03fe. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1250.013715] env[68040]: DEBUG nova.network.neutron [req-9267b08d-97e8-4a62-8f25-77aa36a8237a req-025ece83-cd96-46b7-86b9-8fa974f56056 service nova] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Updating instance_info_cache with network_info: [{"id": "b630aa45-0a74-41ad-af18-f44b96ff03fe", "address": "fa:16:3e:7f:c4:10", "network": {"id": "ca1a84af-ab33-497c-8767-fd4463c076be", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.128", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "0770d674a39c40089de0aade9440b370", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb630aa45-0a", "ovs_interfaceid": "b630aa45-0a74-41ad-af18-f44b96ff03fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.026382] env[68040]: DEBUG oslo_concurrency.lockutils [req-9267b08d-97e8-4a62-8f25-77aa36a8237a req-025ece83-cd96-46b7-86b9-8fa974f56056 service nova] Releasing lock "refresh_cache-57cd94c2-aec3-427e-9b9f-a444fe291974" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1250.440153] env[68040]: DEBUG nova.compute.manager [req-996f1f6a-e16c-42c8-a224-6b70d951dce4 req-706c2173-4353-451d-93c4-496f5aed22c2 service nova] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Received event network-changed-af033d89-b977-4235-bb0c-e6278290dda2 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1250.440259] env[68040]: DEBUG nova.compute.manager [req-996f1f6a-e16c-42c8-a224-6b70d951dce4 req-706c2173-4353-451d-93c4-496f5aed22c2 service nova] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Refreshing instance network info cache due to event network-changed-af033d89-b977-4235-bb0c-e6278290dda2. 
{{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1250.440472] env[68040]: DEBUG oslo_concurrency.lockutils [req-996f1f6a-e16c-42c8-a224-6b70d951dce4 req-706c2173-4353-451d-93c4-496f5aed22c2 service nova] Acquiring lock "refresh_cache-87a7851e-d6fe-481a-8abb-5732e281cb64" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1250.440621] env[68040]: DEBUG oslo_concurrency.lockutils [req-996f1f6a-e16c-42c8-a224-6b70d951dce4 req-706c2173-4353-451d-93c4-496f5aed22c2 service nova] Acquired lock "refresh_cache-87a7851e-d6fe-481a-8abb-5732e281cb64" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.440788] env[68040]: DEBUG nova.network.neutron [req-996f1f6a-e16c-42c8-a224-6b70d951dce4 req-706c2173-4353-451d-93c4-496f5aed22c2 service nova] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Refreshing network info cache for port af033d89-b977-4235-bb0c-e6278290dda2 {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1250.774923] env[68040]: DEBUG nova.network.neutron [req-996f1f6a-e16c-42c8-a224-6b70d951dce4 req-706c2173-4353-451d-93c4-496f5aed22c2 service nova] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Updated VIF entry in instance network info cache for port af033d89-b977-4235-bb0c-e6278290dda2. {{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1250.775350] env[68040]: DEBUG nova.network.neutron [req-996f1f6a-e16c-42c8-a224-6b70d951dce4 req-706c2173-4353-451d-93c4-496f5aed22c2 service nova] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Updating instance_info_cache with network_info: [{"id": "af033d89-b977-4235-bb0c-e6278290dda2", "address": "fa:16:3e:6a:7d:e0", "network": {"id": "d3c3bbf4-ff6a-4caa-9ff8-f634f43ea313", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-426900683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6d8c0d1ea9f34b4b82469cb512fe4696", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf033d89-b9", "ovs_interfaceid": "af033d89-b977-4235-bb0c-e6278290dda2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.785540] env[68040]: DEBUG oslo_concurrency.lockutils [req-996f1f6a-e16c-42c8-a224-6b70d951dce4 req-706c2173-4353-451d-93c4-496f5aed22c2 service nova] Releasing lock "refresh_cache-87a7851e-d6fe-481a-8abb-5732e281cb64" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1252.947353] env[68040]: DEBUG oslo_concurrency.lockutils [None req-40c72093-5de6-4ea1-81fd-f1034c981b6f tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Acquiring 
lock "b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1260.117718] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3c6c6b5e-efcd-4305-af71-79518916aed8 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Acquiring lock "e92b662c-b458-49d8-ac2a-00ae6046a11b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1266.497759] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24a890f8-ebec-46a5-8009-8a91cda7c5bf tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Acquiring lock "57cd94c2-aec3-427e-9b9f-a444fe291974" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1266.984304] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1267.415534] env[68040]: DEBUG oslo_concurrency.lockutils [None req-34745a56-f48b-48e5-ad4e-cebcb2d024c9 tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Acquiring lock "87a7851e-d6fe-481a-8abb-5732e281cb64" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1267.983216] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1267.983571] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1267.996488] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1267.996730] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1267.996918] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1267.997104] env[68040]: 
DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1267.998229] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a5654c-6df5-4459-b4f5-7ae7efa28b3e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.007098] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1552544-0014-413f-8206-731eef23883f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.020658] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9196338-b223-4ab3-ae66-63634a128685 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.026693] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a0f5ad-5841-49f4-bd8e-22a7fffaa639 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.054290] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180927MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1268.054439] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1268.054630] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1268.128679] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1812f13e-b03d-48d4-940a-43974784265b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1268.128855] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance de1b8ef9-0088-4d2a-985e-d04fcff55d31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1268.128987] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance bce68a2b-260c-45cc-ac98-d4b01b4513a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1268.129127] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1e43f6be-f6a3-4569-adea-c82a5d709247 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1268.129249] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3738de32-79cd-4b04-8081-cc1146730c75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1268.129369] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1268.129486] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1268.129615] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e92b662c-b458-49d8-ac2a-00ae6046a11b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1268.129723] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 57cd94c2-aec3-427e-9b9f-a444fe291974 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1268.129838] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 87a7851e-d6fe-481a-8abb-5732e281cb64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1268.140881] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance bd14d08b-d71a-43b0-b72a-6504dc0e2142 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1268.150715] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 00305041-c0c0-4b7b-9149-9bcba4392279 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1268.160564] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 6011cb9b-e882-4eb2-96b4-82a43585acbc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1268.170446] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 81cfab4f-6a32-42b0-bbfc-45596bc9ad4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1268.180714] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f6edf79d-5eff-4e2c-94d2-aa5cf1731748 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1268.190895] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c581d685-7ea0-41f8-b911-ff1dce1b46c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1268.201081] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 310e3ad1-aa4c-44d1-b1e9-152d1de39125 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1268.212775] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 72f5f3ba-c931-40a5-ab73-4e6738e0aaba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1268.223023] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c2f603dd-6a9b-4a0f-b50a-263cf8eb70af has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1268.232246] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance ec66dda9-4e56-4baa-b8aa-8b01f28d8e9c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1268.241933] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8535d103-7bdf-4210-aa1e-180bb100de5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1268.251529] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 462c8f10-1dda-4687-946c-fb40c3e4f049 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1268.261773] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e54d02e5-1e98-4e9d-93e7-bcccfa3307e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1268.271834] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4a08d3e3-5e84-4f34-b418-2c18eadbef25 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1268.271834] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1268.271834] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1268.541912] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bde2869-3fa5-4dfd-b838-1557f9a37217 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.549496] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f732da-5616-4eb5-aff5-0aef28fca836 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.578292] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26e7d00d-6d26-4a8d-a76d-87f83cb66878 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.585728] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7d3c3b-1442-4aef-9b95-7c9aa131b82f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.599625] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1268.608409] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1268.625422] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1268.625610] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.571s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1269.626214] env[68040]: DEBUG oslo_service.periodic_task [None 
req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1269.626494] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1269.626545] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1269.648396] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1269.648566] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1269.648767] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1269.648907] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1269.649045] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1269.649174] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1269.649295] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1269.649412] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1269.649535] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Skipping network cache update for instance because it is Building. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1269.649727] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1269.649873] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1269.650383] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1269.984188] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1269.984426] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1270.979770] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1273.984016] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1273.984322] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1279.368249] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquiring lock "268b5613-b132-49ed-a45b-bc88132177cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1279.368623] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Lock "268b5613-b132-49ed-a45b-bc88132177cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1280.172115] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquiring lock "4dfa01f8-53a0-4ee4-9b00-93017144ea0b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1280.172370] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Lock "4dfa01f8-53a0-4ee4-9b00-93017144ea0b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1282.295332] env[68040]: DEBUG oslo_concurrency.lockutils [None req-78f35d23-63af-45ac-8ec0-4daa73a597f6 tempest-AttachInterfacesTestJSON-1449330779 tempest-AttachInterfacesTestJSON-1449330779-project-member] Acquiring lock "4ce0934f-8277-4029-8a0c-77468ee9b6dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1282.295615] env[68040]: DEBUG oslo_concurrency.lockutils [None req-78f35d23-63af-45ac-8ec0-4daa73a597f6 tempest-AttachInterfacesTestJSON-1449330779 tempest-AttachInterfacesTestJSON-1449330779-project-member] Lock "4ce0934f-8277-4029-8a0c-77468ee9b6dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1293.526596] env[68040]: WARNING oslo_vmware.rw_handles [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1293.526596] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1293.526596] env[68040]: ERROR oslo_vmware.rw_handles File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1293.526596] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1293.526596] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1293.526596] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 1293.526596] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1293.526596] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1293.526596] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1293.526596] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1293.526596] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1293.526596] env[68040]: ERROR oslo_vmware.rw_handles [ 1293.527302] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/39a2b58f-3751-4b41-88ed-3232d5eee879/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1293.528885] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1293.529108] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Copying Virtual Disk [datastore2] vmware_temp/39a2b58f-3751-4b41-88ed-3232d5eee879/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/39a2b58f-3751-4b41-88ed-3232d5eee879/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1293.529396] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a0ec0f49-6504-4d32-a68d-ab0b98969d42 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.537238] env[68040]: DEBUG oslo_vmware.api [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Waiting for the task: (returnval){ [ 1293.537238] env[68040]: value = "task-3200267" [ 1293.537238] env[68040]: _type = "Task" [ 1293.537238] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1293.545873] env[68040]: DEBUG oslo_vmware.api [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Task: {'id': task-3200267, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.048089] env[68040]: DEBUG oslo_vmware.exceptions [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1294.048393] env[68040]: DEBUG oslo_concurrency.lockutils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1294.049011] env[68040]: ERROR nova.compute.manager [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1294.049011] env[68040]: Faults: ['InvalidArgument'] [ 1294.049011] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] Traceback (most recent call last): [ 1294.049011] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1294.049011] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] yield resources [ 1294.049011] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1294.049011] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] self.driver.spawn(context, instance, image_meta, [ 1294.049011] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1294.049011] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1294.049011] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1294.049011] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] self._fetch_image_if_missing(context, vi) [ 1294.049011] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1294.049409] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] image_cache(vi, tmp_image_ds_loc) [ 1294.049409] env[68040]: ERROR 
nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1294.049409] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] vm_util.copy_virtual_disk( [ 1294.049409] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1294.049409] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] session._wait_for_task(vmdk_copy_task) [ 1294.049409] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1294.049409] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] return self.wait_for_task(task_ref) [ 1294.049409] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1294.049409] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] return evt.wait() [ 1294.049409] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1294.049409] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] result = hub.switch() [ 1294.049409] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1294.049409] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] return self.greenlet.switch() [ 1294.049871] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1294.049871] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] self.f(*self.args, **self.kw) [ 1294.049871] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1294.049871] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] raise exceptions.translate_fault(task_info.error) [ 1294.049871] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1294.049871] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] Faults: ['InvalidArgument'] [ 1294.049871] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] [ 1294.049871] env[68040]: INFO nova.compute.manager [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Terminating instance [ 1294.050907] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 
tempest-ServerDiagnosticsV248Test-1374976813-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1294.051143] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1294.051376] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-77a8b32b-8eee-463a-86ba-5036283e3ed4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.053711] env[68040]: DEBUG nova.compute.manager [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1294.053909] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1294.054621] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce7add41-be9a-4ea2-bf4f-25c0bfb18b89 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.061141] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1294.061421] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-072114fa-617a-4e7e-a6f4-8b572fd0bcd2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.063564] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1294.063744] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1294.064705] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f88d4fbc-1357-4ff2-93d3-5d2533af05de {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.069945] env[68040]: DEBUG oslo_vmware.api [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Waiting for the task: (returnval){ [ 1294.069945] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5264f6c5-97de-14e1-6130-8ce62f84cd83" [ 1294.069945] env[68040]: _type = "Task" [ 1294.069945] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.077076] env[68040]: DEBUG oslo_vmware.api [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5264f6c5-97de-14e1-6130-8ce62f84cd83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.128121] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1294.128398] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1294.128645] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Deleting the datastore file [datastore2] 1812f13e-b03d-48d4-940a-43974784265b {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1294.128955] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e1334fd-eff8-48f5-a269-9951d878188a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.135932] env[68040]: DEBUG oslo_vmware.api [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Waiting for the task: (returnval){ [ 1294.135932] env[68040]: value = "task-3200269" [ 1294.135932] env[68040]: _type = "Task" [ 1294.135932] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.143687] env[68040]: DEBUG oslo_vmware.api [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Task: {'id': task-3200269, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.580531] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1294.580873] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Creating directory with path [datastore2] vmware_temp/89195c32-3ebf-4f1c-853f-5a96dd3f415d/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1294.581063] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e151fcb9-42b8-4cf2-abd2-b4ab4b27164c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.592453] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Created directory with path [datastore2] vmware_temp/89195c32-3ebf-4f1c-853f-5a96dd3f415d/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1294.592655] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Fetch image to [datastore2] vmware_temp/89195c32-3ebf-4f1c-853f-5a96dd3f415d/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1294.592843] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/89195c32-3ebf-4f1c-853f-5a96dd3f415d/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1294.593642] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-066c50d4-e407-4ac0-afe6-c4a84d30c9c5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.601992] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac50554-8265-4fce-b405-2405a4611939 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.610929] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6e8584-e840-435e-bb71-2f4be28c6c82 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.645087] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2d0c7a11-07d3-489f-a467-41c7a5068ea9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.650164] env[68040]: DEBUG oslo_vmware.api [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Task: {'id': task-3200269, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081887} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.651616] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1294.653094] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1294.653094] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1294.653094] env[68040]: INFO nova.compute.manager [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Took 0.60 seconds to destroy the instance on the hypervisor. 
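The CopyVirtualDisk_Task failure above is surfaced through oslo.vmware's task-polling API: the SOAP call returns a task reference immediately, and the fault only appears when wait_for_task() polls the task to completion. A minimal sketch of that pattern, assuming oslo.vmware's VMwareAPISession and the standard CopyVirtualDisk_Task parameters (this is not Nova's actual vm_util code):

    from oslo_vmware import exceptions as vexc

    def copy_virtual_disk(session, source_name, dest_name, datacenter):
        # The call returns a task reference at once; success or failure
        # (e.g. the InvalidArgument 'fileType' fault logged above) is only
        # reported while polling the task.
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task',
            session.vim.service_content.virtualDiskManager,
            sourceName=source_name, sourceDatacenter=datacenter,
            destName=dest_name, destDatacenter=datacenter)
        try:
            return session.wait_for_task(task)
        except vexc.VimFaultException as e:
            # e.fault_list mirrors the log's "Faults: ['InvalidArgument']";
            # the caller reacts by tearing down the half-built instance and
            # rescheduling, as the surrounding records show.
            raise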
[ 1294.654191] env[68040]: DEBUG nova.compute.claims [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1294.654368] env[68040]: DEBUG oslo_concurrency.lockutils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1294.654582] env[68040]: DEBUG oslo_concurrency.lockutils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1294.657408] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-439e600b-340d-44be-bcda-1896999c8fb1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.682019] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1294.758711] env[68040]: DEBUG oslo_vmware.rw_handles [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/89195c32-3ebf-4f1c-853f-5a96dd3f415d/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1294.825998] env[68040]: DEBUG oslo_vmware.rw_handles [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1294.826227] env[68040]: DEBUG oslo_vmware.rw_handles [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/89195c32-3ebf-4f1c-853f-5a96dd3f415d/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1295.120168] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a981a0f-fb32-465c-8be6-e524752a7e02 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.125887] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d0845b-a85b-484e-94a1-7b4781dae243 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.158559] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6c98c5-30a5-4c14-864e-5eda8067f3a4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.171027] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cade86fc-de66-49af-a88a-c88f4ee6e83c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.181274] env[68040]: DEBUG nova.compute.provider_tree [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1295.190844] env[68040]: DEBUG nova.scheduler.client.report [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1295.208119] env[68040]: DEBUG oslo_concurrency.lockutils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.551s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1295.208119] env[68040]: ERROR nova.compute.manager [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1295.208119] env[68040]: Faults: ['InvalidArgument'] [ 1295.208119] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] Traceback (most recent call last): [ 1295.208119] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, 
in _build_and_run_instance [ 1295.208119] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] self.driver.spawn(context, instance, image_meta, [ 1295.208119] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1295.208119] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1295.208119] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1295.208119] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] self._fetch_image_if_missing(context, vi) [ 1295.208549] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1295.208549] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] image_cache(vi, tmp_image_ds_loc) [ 1295.208549] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1295.208549] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] vm_util.copy_virtual_disk( [ 1295.208549] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1295.208549] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] session._wait_for_task(vmdk_copy_task) [ 1295.208549] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1295.208549] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] return self.wait_for_task(task_ref) [ 1295.208549] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1295.208549] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] return evt.wait() [ 1295.208549] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1295.208549] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] result = hub.switch() [ 1295.208549] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1295.208954] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] return self.greenlet.switch() [ 1295.208954] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1295.208954] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] self.f(*self.args, **self.kw) [ 1295.208954] env[68040]: ERROR nova.compute.manager [instance: 
1812f13e-b03d-48d4-940a-43974784265b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1295.208954] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] raise exceptions.translate_fault(task_info.error) [ 1295.208954] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1295.208954] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] Faults: ['InvalidArgument'] [ 1295.208954] env[68040]: ERROR nova.compute.manager [instance: 1812f13e-b03d-48d4-940a-43974784265b] [ 1295.208954] env[68040]: DEBUG nova.compute.utils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1295.209854] env[68040]: DEBUG nova.compute.manager [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Build of instance 1812f13e-b03d-48d4-940a-43974784265b was re-scheduled: A specified parameter was not correct: fileType [ 1295.209854] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1295.210406] env[68040]: DEBUG nova.compute.manager [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1295.210712] env[68040]: DEBUG nova.compute.manager [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1295.211097] env[68040]: DEBUG nova.compute.manager [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1295.211403] env[68040]: DEBUG nova.network.neutron [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1295.908544] env[68040]: DEBUG nova.network.neutron [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1295.924850] env[68040]: INFO nova.compute.manager [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Took 0.71 seconds to deallocate network for instance. [ 1296.061525] env[68040]: INFO nova.scheduler.client.report [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Deleted allocations for instance 1812f13e-b03d-48d4-940a-43974784265b [ 1296.097791] env[68040]: DEBUG oslo_concurrency.lockutils [None req-dd25868b-9055-44be-afa0-ea34e78e5dee tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Lock "1812f13e-b03d-48d4-940a-43974784265b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 625.080s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1296.099539] env[68040]: DEBUG oslo_concurrency.lockutils [None req-bfba6510-551e-457e-b3e6-7b336ec2b617 tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Lock "1812f13e-b03d-48d4-940a-43974784265b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 424.940s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1296.099766] env[68040]: DEBUG oslo_concurrency.lockutils [None req-bfba6510-551e-457e-b3e6-7b336ec2b617 tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Acquiring lock "1812f13e-b03d-48d4-940a-43974784265b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1296.102030] env[68040]: DEBUG oslo_concurrency.lockutils [None req-bfba6510-551e-457e-b3e6-7b336ec2b617 tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Lock
"1812f13e-b03d-48d4-940a-43974784265b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1296.102030] env[68040]: DEBUG oslo_concurrency.lockutils [None req-bfba6510-551e-457e-b3e6-7b336ec2b617 tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Lock "1812f13e-b03d-48d4-940a-43974784265b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1296.103191] env[68040]: INFO nova.compute.manager [None req-bfba6510-551e-457e-b3e6-7b336ec2b617 tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Terminating instance [ 1296.105813] env[68040]: DEBUG nova.compute.manager [None req-bfba6510-551e-457e-b3e6-7b336ec2b617 tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1296.105813] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-bfba6510-551e-457e-b3e6-7b336ec2b617 tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1296.105902] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0adc8bea-3c9a-4a96-9919-46b4cd7b8e6a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.115335] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504b5631-b23f-467f-b7d4-9c1bdf7a0fe9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.127452] env[68040]: DEBUG nova.compute.manager [None req-8f5a77d5-0a71-4401-b529-3c7fd1e6caca tempest-ServersTestJSON-1745355635 tempest-ServersTestJSON-1745355635-project-member] [instance: bd14d08b-d71a-43b0-b72a-6504dc0e2142] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1296.149932] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-bfba6510-551e-457e-b3e6-7b336ec2b617 tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1812f13e-b03d-48d4-940a-43974784265b could not be found.
[ 1296.150182] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-bfba6510-551e-457e-b3e6-7b336ec2b617 tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1296.151234] env[68040]: INFO nova.compute.manager [None req-bfba6510-551e-457e-b3e6-7b336ec2b617 tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1296.151234] env[68040]: DEBUG oslo.service.loopingcall [None req-bfba6510-551e-457e-b3e6-7b336ec2b617 tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1296.151234] env[68040]: DEBUG nova.compute.manager [-] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1296.151234] env[68040]: DEBUG nova.network.neutron [-] [instance: 1812f13e-b03d-48d4-940a-43974784265b] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1296.159776] env[68040]: DEBUG nova.compute.manager [None req-8f5a77d5-0a71-4401-b529-3c7fd1e6caca tempest-ServersTestJSON-1745355635 tempest-ServersTestJSON-1745355635-project-member] [instance: bd14d08b-d71a-43b0-b72a-6504dc0e2142] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1296.190356] env[68040]: DEBUG nova.network.neutron [-] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1296.192906] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8f5a77d5-0a71-4401-b529-3c7fd1e6caca tempest-ServersTestJSON-1745355635 tempest-ServersTestJSON-1745355635-project-member] Lock "bd14d08b-d71a-43b0-b72a-6504dc0e2142" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 219.760s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1296.200434] env[68040]: INFO nova.compute.manager [-] [instance: 1812f13e-b03d-48d4-940a-43974784265b] Took 0.05 seconds to deallocate network for instance. [ 1296.211770] env[68040]: DEBUG nova.compute.manager [None req-618f0b27-75aa-434c-a2b6-3b8e177fbe65 tempest-AttachInterfacesTestJSON-1449330779 tempest-AttachInterfacesTestJSON-1449330779-project-member] [instance: 00305041-c0c0-4b7b-9149-9bcba4392279] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1296.243138] env[68040]: DEBUG nova.compute.manager [None req-618f0b27-75aa-434c-a2b6-3b8e177fbe65 tempest-AttachInterfacesTestJSON-1449330779 tempest-AttachInterfacesTestJSON-1449330779-project-member] [instance: 00305041-c0c0-4b7b-9149-9bcba4392279] Instance disappeared before build.
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1296.272656] env[68040]: DEBUG oslo_concurrency.lockutils [None req-618f0b27-75aa-434c-a2b6-3b8e177fbe65 tempest-AttachInterfacesTestJSON-1449330779 tempest-AttachInterfacesTestJSON-1449330779-project-member] Lock "00305041-c0c0-4b7b-9149-9bcba4392279" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 213.692s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1296.284701] env[68040]: DEBUG nova.compute.manager [None req-0be19fd9-696b-49de-bdcc-2dd65ee2b98f tempest-ServerRescueTestJSONUnderV235-1504598290 tempest-ServerRescueTestJSONUnderV235-1504598290-project-member] [instance: 6011cb9b-e882-4eb2-96b4-82a43585acbc] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1296.327034] env[68040]: DEBUG nova.compute.manager [None req-0be19fd9-696b-49de-bdcc-2dd65ee2b98f tempest-ServerRescueTestJSONUnderV235-1504598290 tempest-ServerRescueTestJSONUnderV235-1504598290-project-member] [instance: 6011cb9b-e882-4eb2-96b4-82a43585acbc] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1296.345118] env[68040]: DEBUG oslo_concurrency.lockutils [None req-bfba6510-551e-457e-b3e6-7b336ec2b617 tempest-AttachInterfacesUnderV243Test-1219646347 tempest-AttachInterfacesUnderV243Test-1219646347-project-member] Lock "1812f13e-b03d-48d4-940a-43974784265b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.245s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1296.346411] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "1812f13e-b03d-48d4-940a-43974784265b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 85.158s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1296.350041] env[68040]: INFO nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 1812f13e-b03d-48d4-940a-43974784265b] During sync_power_state the instance has a pending task (deleting). Skip.
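The _sync_power_states records above reflect the guard that keeps the periodic power-state sync from racing an in-flight operation: an instance that still carries a task_state is skipped for this cycle. A minimal sketch of that rule (function and argument names assumed, not Nova's exact code):

    def query_driver_power_state_and_sync(instance, driver_state, do_sync):
        if instance.task_state is not None:
            # e.g. task_state='deleting' above: another operation owns the
            # instance right now, so the sync skips it this cycle.
            return
        do_sync(instance, driver_state)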
[ 1296.350041] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "1812f13e-b03d-48d4-940a-43974784265b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1296.354346] env[68040]: DEBUG oslo_concurrency.lockutils [None req-0be19fd9-696b-49de-bdcc-2dd65ee2b98f tempest-ServerRescueTestJSONUnderV235-1504598290 tempest-ServerRescueTestJSONUnderV235-1504598290-project-member] Lock "6011cb9b-e882-4eb2-96b4-82a43585acbc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 212.797s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1296.368260] env[68040]: DEBUG nova.compute.manager [None req-3fb08628-7aa9-4881-bc32-83c863bc6352 tempest-ServersV294TestFqdnHostnames-341872804 tempest-ServersV294TestFqdnHostnames-341872804-project-member] [instance: 81cfab4f-6a32-42b0-bbfc-45596bc9ad4e] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1296.400586] env[68040]: DEBUG nova.compute.manager [None req-3fb08628-7aa9-4881-bc32-83c863bc6352 tempest-ServersV294TestFqdnHostnames-341872804 tempest-ServersV294TestFqdnHostnames-341872804-project-member] [instance: 81cfab4f-6a32-42b0-bbfc-45596bc9ad4e] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1296.430577] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3fb08628-7aa9-4881-bc32-83c863bc6352 tempest-ServersV294TestFqdnHostnames-341872804 tempest-ServersV294TestFqdnHostnames-341872804-project-member] Lock "81cfab4f-6a32-42b0-bbfc-45596bc9ad4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 210.686s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1296.440250] env[68040]: DEBUG nova.compute.manager [None req-a193e22c-3fc1-4312-b71e-31f856aa9166 tempest-VolumesAdminNegativeTest-1864683811 tempest-VolumesAdminNegativeTest-1864683811-project-member] [instance: f6edf79d-5eff-4e2c-94d2-aa5cf1731748] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1296.470231] env[68040]: DEBUG nova.compute.manager [None req-a193e22c-3fc1-4312-b71e-31f856aa9166 tempest-VolumesAdminNegativeTest-1864683811 tempest-VolumesAdminNegativeTest-1864683811-project-member] [instance: f6edf79d-5eff-4e2c-94d2-aa5cf1731748] Instance disappeared before build.
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1296.504590] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a193e22c-3fc1-4312-b71e-31f856aa9166 tempest-VolumesAdminNegativeTest-1864683811 tempest-VolumesAdminNegativeTest-1864683811-project-member] Lock "f6edf79d-5eff-4e2c-94d2-aa5cf1731748" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 205.271s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1296.520084] env[68040]: DEBUG nova.compute.manager [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1296.602443] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1296.602646] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1296.604202] env[68040]: INFO nova.compute.claims [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1297.048292] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb615250-41de-465d-9054-faa996d4fadd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.052865] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-724c3367-0e83-4caf-91b0-76741ab867fd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.085568] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86537570-2d4c-421d-a372-ee017ca91596 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.093621] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f28acad1-9eca-43fd-a300-1e60e8ec146e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.108601] env[68040]: DEBUG nova.compute.provider_tree [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1297.120515] env[68040]: DEBUG nova.scheduler.client.report [None
req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1297.140327] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.537s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1297.140771] env[68040]: DEBUG nova.compute.manager [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1297.183480] env[68040]: DEBUG nova.compute.utils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1297.184909] env[68040]: DEBUG nova.compute.manager [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1297.185277] env[68040]: DEBUG nova.network.neutron [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1297.196228] env[68040]: DEBUG nova.compute.manager [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1297.275850] env[68040]: DEBUG nova.compute.manager [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Start spawning the instance on the hypervisor. 
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1297.303342] env[68040]: DEBUG nova.virt.hardware [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1297.303604] env[68040]: DEBUG nova.virt.hardware [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1297.303764] env[68040]: DEBUG nova.virt.hardware [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1297.303952] env[68040]: DEBUG nova.virt.hardware [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1297.305094] env[68040]: DEBUG nova.virt.hardware [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1297.305819] env[68040]: DEBUG nova.virt.hardware [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1297.307603] env[68040]: DEBUG nova.virt.hardware [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1297.307835] env[68040]: DEBUG nova.virt.hardware [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1297.308073] env[68040]: DEBUG nova.virt.hardware [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Got 1 
possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1297.308618] env[68040]: DEBUG nova.virt.hardware [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1297.309158] env[68040]: DEBUG nova.virt.hardware [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1297.309668] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ac2a16-6e97-4824-983d-0a9ef514085e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.314229] env[68040]: DEBUG nova.policy [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4fdca25678784d958a467db4ecfc929a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '784658a8924c403e88fbd9cc3ff787a0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 1297.321842] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4aa6ebc-0727-46af-9770-b6319bb83bcb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.860543] env[68040]: DEBUG oslo_concurrency.lockutils [None req-9aca850b-bef5-436d-97e8-51de58b83b70 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "09489d57-c6c1-4ac2-9c14-1a190172970c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.861308] env[68040]: DEBUG oslo_concurrency.lockutils [None req-9aca850b-bef5-436d-97e8-51de58b83b70 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "09489d57-c6c1-4ac2-9c14-1a190172970c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.214385] env[68040]: DEBUG nova.network.neutron [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Successfully created port: 04c382af-1645-4c83-a0b6-a1001e94e033 {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1299.377711] env[68040]: DEBUG oslo_concurrency.lockutils [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 
tempest-ServersTestFqdnHostnames-1502255862-project-member] Acquiring lock "b5def543-2cbf-4ecc-b492-3607e5e74e38" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.377711] env[68040]: DEBUG oslo_concurrency.lockutils [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Lock "b5def543-2cbf-4ecc-b492-3607e5e74e38" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1299.491822] env[68040]: DEBUG nova.network.neutron [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Successfully updated port: 04c382af-1645-4c83-a0b6-a1001e94e033 {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1299.499588] env[68040]: DEBUG nova.compute.manager [req-bfddda39-6a6c-45eb-b9a5-2d781c93164a req-f5fca584-d335-4e93-9a49-fd33033856a7 service nova] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Received event network-vif-plugged-04c382af-1645-4c83-a0b6-a1001e94e033 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1299.499798] env[68040]: DEBUG oslo_concurrency.lockutils [req-bfddda39-6a6c-45eb-b9a5-2d781c93164a req-f5fca584-d335-4e93-9a49-fd33033856a7 service nova] Acquiring lock "c581d685-7ea0-41f8-b911-ff1dce1b46c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.500059] env[68040]: DEBUG oslo_concurrency.lockutils [req-bfddda39-6a6c-45eb-b9a5-2d781c93164a req-f5fca584-d335-4e93-9a49-fd33033856a7 service nova] Lock "c581d685-7ea0-41f8-b911-ff1dce1b46c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1299.500236] env[68040]: DEBUG oslo_concurrency.lockutils [req-bfddda39-6a6c-45eb-b9a5-2d781c93164a req-f5fca584-d335-4e93-9a49-fd33033856a7 service nova] Lock "c581d685-7ea0-41f8-b911-ff1dce1b46c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1299.500408] env[68040]: DEBUG nova.compute.manager [req-bfddda39-6a6c-45eb-b9a5-2d781c93164a req-f5fca584-d335-4e93-9a49-fd33033856a7 service nova] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] No waiting events found dispatching network-vif-plugged-04c382af-1645-4c83-a0b6-a1001e94e033 {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1299.500567] env[68040]: WARNING nova.compute.manager [req-bfddda39-6a6c-45eb-b9a5-2d781c93164a req-f5fca584-d335-4e93-9a49-fd33033856a7 service nova] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Received unexpected event network-vif-plugged-04c382af-1645-4c83-a0b6-a1001e94e033 for instance with vm_state building and task_state spawning. 
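The tail of that exchange is the usual Nova/Neutron handshake seen from the compute side: the port is created, Neutron fires network-vif-plugged, and because the driver has not yet registered a waiter for it, the event is logged as unexpected and dropped. The latch behind pop_instance_event can be sketched with the stdlib alone (class and function names here are illustrative, not Nova's actual code):

    import threading

    class InstanceEvents:
        # One latch per (instance_uuid, event_name) pair.
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}

        def prepare(self, uuid, name):
            # Register the waiter *before* kicking off the external operation.
            with self._lock:
                ev = threading.Event()
                self._waiters[(uuid, name)] = ev
                return ev

        def pop(self, uuid, name):
            # Called by the handler for incoming external events.
            with self._lock:
                return self._waiters.pop((uuid, name), None)

    def on_external_event(events, uuid, name):
        ev = events.pop(uuid, name)
        if ev is None:
            # No one was waiting yet: the event arrived early, exactly the
            # "Received unexpected event" WARNING above.
            print('Received unexpected event %s for %s' % (name, uuid))
        else:
            ev.set()

If compute calls prepare() before triggering the port update, the plug event releases the waiter; when the event beats the registration, as it does here, it is simply discarded.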
[ 1299.509316] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquiring lock "refresh_cache-c581d685-7ea0-41f8-b911-ff1dce1b46c7" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1299.511027] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquired lock "refresh_cache-c581d685-7ea0-41f8-b911-ff1dce1b46c7" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1299.511027] env[68040]: DEBUG nova.network.neutron [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1299.578067] env[68040]: DEBUG nova.network.neutron [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1299.881328] env[68040]: DEBUG nova.network.neutron [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Updating instance_info_cache with network_info: [{"id": "04c382af-1645-4c83-a0b6-a1001e94e033", "address": "fa:16:3e:be:39:c1", "network": {"id": "0977b41e-8a7d-4917-a780-9c1eb7453e4c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1372225524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "784658a8924c403e88fbd9cc3ff787a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04c382af-16", "ovs_interfaceid": "04c382af-1645-4c83-a0b6-a1001e94e033", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1299.913878] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Releasing lock "refresh_cache-c581d685-7ea0-41f8-b911-ff1dce1b46c7" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1299.914218] env[68040]: DEBUG nova.compute.manager [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 
c581d685-7ea0-41f8-b911-ff1dce1b46c7] Instance network_info: |[{"id": "04c382af-1645-4c83-a0b6-a1001e94e033", "address": "fa:16:3e:be:39:c1", "network": {"id": "0977b41e-8a7d-4917-a780-9c1eb7453e4c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1372225524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "784658a8924c403e88fbd9cc3ff787a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04c382af-16", "ovs_interfaceid": "04c382af-1645-4c83-a0b6-a1001e94e033", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1299.914645] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:39:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '680cb499-2a47-482b-af0d-112016ac0e17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '04c382af-1645-4c83-a0b6-a1001e94e033', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1299.922473] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Creating folder: Project (784658a8924c403e88fbd9cc3ff787a0). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1299.923026] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-274a32a9-3d12-4f67-9f54-67a6f43409a4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.937793] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Created folder: Project (784658a8924c403e88fbd9cc3ff787a0) in parent group-v639956. [ 1299.937997] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Creating folder: Instances. Parent ref: group-v640032. 
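The two Folder.CreateFolder invocations above build the per-tenant hierarchy, Project (784658a8924c403e88fbd9cc3ff787a0)/Instances, under the parent reference group-v639956. A create-if-missing helper of the kind such code relies on can be sketched against an in-memory stand-in for the vSphere folder API (the DuplicateName fault handling is an assumption for illustration, not taken from this log):

    class DuplicateName(Exception):
        # Stand-in for the vSphere DuplicateName fault.
        pass

    class FakeFolderAPI:
        # In-memory stand-in; refs mimic vCenter's 'group-v...' identifiers.
        def __init__(self):
            self._children = {}  # (parent_ref, name) -> folder ref

        def create_folder(self, parent_ref, name):
            key = (parent_ref, name)
            if key in self._children:
                raise DuplicateName(name)
            ref = 'group-v%d' % (640000 + len(self._children))
            self._children[key] = ref
            return ref

    def ensure_folder(api, parent_ref, name):
        # Idempotent create: a racing creator loses with DuplicateName and
        # reuses the folder that already exists.
        try:
            return api.create_folder(parent_ref, name)
        except DuplicateName:
            return api._children[(parent_ref, name)]

    api = FakeFolderAPI()
    project = ensure_folder(api, 'group-v639956',
                            'Project (784658a8924c403e88fbd9cc3ff787a0)')
    instances = ensure_folder(api, project, 'Instances')

Two concurrent builds for the same tenant can both run ensure_folder() and converge on the same folder, which keeps racing spawns from tripping over each other.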
{{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1299.938465] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4c783f30-b361-4512-9152-2a1334a922f8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.947827] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Created folder: Instances in parent group-v640032. [ 1299.948179] env[68040]: DEBUG oslo.service.loopingcall [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1299.948431] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1299.948720] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ba2ae92-00d5-4e8b-9702-a352538d5746 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.967656] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1299.967656] env[68040]: value = "task-3200272" [ 1299.967656] env[68040]: _type = "Task" [ 1299.967656] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.974993] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200272, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.477736] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200272, 'name': CreateVM_Task, 'duration_secs': 0.382186} completed successfully. 
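CreateVM_Task above is handed back as task-3200272, polled at 0% and then reported complete with duration_secs 0.382186. Stripped of the oslo.vmware plumbing, the wait is a plain poll-until-terminal-state loop (the states and field names below are simplified placeholders for the real TaskInfo object):

    import time

    def wait_for_task(poll, interval=0.5, timeout=300.0):
        # `poll` returns a dict such as {'state': 'running', 'progress': 40}.
        deadline = time.monotonic() + timeout
        while True:
            info = poll()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            if time.monotonic() > deadline:
                raise TimeoutError('task still running after %.0fs' % timeout)
            time.sleep(interval)

The real implementation drives this from a fixed-interval looping call and logs the progress on every sample, which is where the "progress is 0%" entries come from.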
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.478093] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1300.485225] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1300.485390] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.485695] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1300.485956] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-406244c2-5a38-452b-943c-86a08cc538cb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.490455] env[68040]: DEBUG oslo_vmware.api [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Waiting for the task: (returnval){ [ 1300.490455] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52cad286-f72c-7e90-3524-a4b35c35699b" [ 1300.490455] env[68040]: _type = "Task" [ 1300.490455] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.498079] env[68040]: DEBUG oslo_vmware.api [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52cad286-f72c-7e90-3524-a4b35c35699b, 'name': SearchDatastore_Task} progress is 0%. 
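The acquisitions above take a lock and an external semaphore named after the cached image path, [datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e, so concurrent spawns of the same image serialize while different images proceed in parallel. With oslo.concurrency's lock() context manager the shape of that is roughly as follows (the function and its parameters are placeholders, not Nova's code):

    import os

    from oslo_concurrency import lockutils

    def get_cached_image(image_id, cache_dir, fetch):
        # One lock per image id: only the first caller downloads; the rest
        # wait on the lock and then reuse the cached file.
        path = os.path.join(cache_dir, image_id)
        with lockutils.lock('image-cache-%s' % image_id):
            if not os.path.exists(path):
                fetch(image_id, path)
        return path

Passing external=True to lock() extends the exclusion across processes via a lock file, which is what the "external semaphore" entry above hints at.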
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.000832] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1301.001114] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1301.001334] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1301.613053] env[68040]: DEBUG nova.compute.manager [req-7a1e1b55-74b1-4801-9608-c7fb4adfc785 req-143e2fb4-5c3a-4397-98a5-942c03ccb51c service nova] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Received event network-changed-04c382af-1645-4c83-a0b6-a1001e94e033 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1301.613277] env[68040]: DEBUG nova.compute.manager [req-7a1e1b55-74b1-4801-9608-c7fb4adfc785 req-143e2fb4-5c3a-4397-98a5-942c03ccb51c service nova] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Refreshing instance network info cache due to event network-changed-04c382af-1645-4c83-a0b6-a1001e94e033. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1301.613466] env[68040]: DEBUG oslo_concurrency.lockutils [req-7a1e1b55-74b1-4801-9608-c7fb4adfc785 req-143e2fb4-5c3a-4397-98a5-942c03ccb51c service nova] Acquiring lock "refresh_cache-c581d685-7ea0-41f8-b911-ff1dce1b46c7" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1301.613711] env[68040]: DEBUG oslo_concurrency.lockutils [req-7a1e1b55-74b1-4801-9608-c7fb4adfc785 req-143e2fb4-5c3a-4397-98a5-942c03ccb51c service nova] Acquired lock "refresh_cache-c581d685-7ea0-41f8-b911-ff1dce1b46c7" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.613759] env[68040]: DEBUG nova.network.neutron [req-7a1e1b55-74b1-4801-9608-c7fb4adfc785 req-143e2fb4-5c3a-4397-98a5-942c03ccb51c service nova] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Refreshing network info cache for port 04c382af-1645-4c83-a0b6-a1001e94e033 {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1302.051404] env[68040]: DEBUG nova.network.neutron [req-7a1e1b55-74b1-4801-9608-c7fb4adfc785 req-143e2fb4-5c3a-4397-98a5-942c03ccb51c service nova] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Updated VIF entry in instance network info cache for port 04c382af-1645-4c83-a0b6-a1001e94e033. 
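The network-changed event above makes the handler re-enter the cache under refresh_cache-c581d685-7ea0-41f8-b911-ff1dce1b46c7, rebuild the port's view, and release the lock once the VIF entry is updated. Reduced to the stdlib, the one-lock-per-instance discipline looks like this (NetworkInfoCache is illustrative, not Nova's class):

    import threading

    class NetworkInfoCache:
        def __init__(self, rebuild):
            self._rebuild = rebuild    # callable: uuid -> fresh network_info
            self._cache = {}
            self._locks = {}           # uuid -> lock ("refresh_cache-<uuid>")
            self._guard = threading.Lock()

        def _lock_for(self, uuid):
            with self._guard:
                return self._locks.setdefault(uuid, threading.Lock())

        def refresh(self, uuid):
            # Serializes refreshes per instance without blocking the rest.
            with self._lock_for(uuid):
                self._cache[uuid] = self._rebuild(uuid)
                return self._cache[uuid]

Keeping the lock granularity at the instance level is what lets this event handler run while other instances continue building.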
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1302.051734] env[68040]: DEBUG nova.network.neutron [req-7a1e1b55-74b1-4801-9608-c7fb4adfc785 req-143e2fb4-5c3a-4397-98a5-942c03ccb51c service nova] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Updating instance_info_cache with network_info: [{"id": "04c382af-1645-4c83-a0b6-a1001e94e033", "address": "fa:16:3e:be:39:c1", "network": {"id": "0977b41e-8a7d-4917-a780-9c1eb7453e4c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1372225524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "784658a8924c403e88fbd9cc3ff787a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04c382af-16", "ovs_interfaceid": "04c382af-1645-4c83-a0b6-a1001e94e033", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1302.061257] env[68040]: DEBUG oslo_concurrency.lockutils [req-7a1e1b55-74b1-4801-9608-c7fb4adfc785 req-143e2fb4-5c3a-4397-98a5-942c03ccb51c service nova] Releasing lock "refresh_cache-c581d685-7ea0-41f8-b911-ff1dce1b46c7" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1309.258144] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12c94775-7ff4-4860-8da5-7478cc755a1d tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquiring lock "c581d685-7ea0-41f8-b911-ff1dce1b46c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.346905] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Acquiring lock "f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.346905] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Lock "f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1315.709726] env[68040]: DEBUG oslo_concurrency.lockutils [None req-bf318a9d-6437-4000-a9fe-936780bbf9c8 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Acquiring lock "f9d3a788-4f1b-46f7-83ab-dd6884f68d2e" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.710009] env[68040]: DEBUG oslo_concurrency.lockutils [None req-bf318a9d-6437-4000-a9fe-936780bbf9c8 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Lock "f9d3a788-4f1b-46f7-83ab-dd6884f68d2e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1324.808799] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1f92486e-a8ce-4c1c-a14d-d0d1363460a8 tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Acquiring lock "83475c46-38de-4918-91b4-b53dcf3ead77" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1324.809127] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1f92486e-a8ce-4c1c-a14d-d0d1363460a8 tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Lock "83475c46-38de-4918-91b4-b53dcf3ead77" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.984618] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1328.985331] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1328.985331] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1328.985331] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1329.019245] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1329.021625] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Skipping network cache update for instance because it is Building. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1329.021625] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1329.021625] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1329.021625] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1329.021625] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1329.021880] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1329.021880] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1329.021880] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1329.021880] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1329.021880] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. 
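The heal pass above iterates the instance list, skips everything still in the Building state, and ends with nothing to do. oslo.service drives tasks like this through decorated methods on a PeriodicTasks subclass; a minimal sketch using that documented decorator (the task body, spacing, and field names are invented for illustration):

    from oslo_service import periodic_task

    class ComputePeriodics(periodic_task.PeriodicTasks):
        def __init__(self, conf, list_instances):
            super().__init__(conf)
            self._list_instances = list_instances

        @periodic_task.periodic_task(spacing=60)
        def _heal_instance_info_cache(self, context):
            for inst in self._list_instances():
                if inst['vm_state'] == 'building':
                    # Mirrors the "Skipping network cache update" entries.
                    continue
                # A real heal would refresh this instance's network info
                # and stop after one instance per pass.
                return
            print("Didn't find any instances for network info cache update.")

    # A service loop calls run_periodic_tasks(context) on a timer, producing
    # the "Running periodic task ComputeManager...." entries above.

Here every candidate is Building, so the loop falls through to the final message, matching the log.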
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1329.022866] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1329.022866] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1329.035790] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1329.035790] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1329.035790] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1329.035790] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1329.036660] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b5d66ef-9ed6-40c4-ab1e-8003bacec5d0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.046335] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e74e6fbc-fac8-4ed9-bd68-0e6754cf80fb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.064229] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddfbb438-aa3d-4c9e-a39f-3796c5d9b4c0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.079008] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54822637-61b9-44d2-abb8-c4787504b35d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.113151] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180989MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1329.113338] env[68040]: DEBUG 
oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1329.113541] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1329.214599] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance de1b8ef9-0088-4d2a-985e-d04fcff55d31 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1329.214599] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance bce68a2b-260c-45cc-ac98-d4b01b4513a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1329.214599] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1e43f6be-f6a3-4569-adea-c82a5d709247 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1329.214599] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3738de32-79cd-4b04-8081-cc1146730c75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1329.214971] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1329.214971] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1329.214971] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e92b662c-b458-49d8-ac2a-00ae6046a11b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1329.214971] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 57cd94c2-aec3-427e-9b9f-a444fe291974 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1329.215241] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 87a7851e-d6fe-481a-8abb-5732e281cb64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1329.215241] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c581d685-7ea0-41f8-b911-ff1dce1b46c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1329.229617] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 462c8f10-1dda-4687-946c-fb40c3e4f049 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.239158] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e54d02e5-1e98-4e9d-93e7-bcccfa3307e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.251488] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4a08d3e3-5e84-4f34-b418-2c18eadbef25 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.265638] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 268b5613-b132-49ed-a45b-bc88132177cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.279995] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4dfa01f8-53a0-4ee4-9b00-93017144ea0b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.290407] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4ce0934f-8277-4029-8a0c-77468ee9b6dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.300823] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 09489d57-c6c1-4ac2-9c14-1a190172970c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.316579] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b5def543-2cbf-4ecc-b492-3607e5e74e38 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.331053] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.342306] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f9d3a788-4f1b-46f7-83ab-dd6884f68d2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.355138] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 83475c46-38de-4918-91b4-b53dcf3ead77 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
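The audit above walks every placement allocation held against this node: the actively managed instances keep theirs, while allocations whose instances are merely scheduled here and have yet to start are left alone rather than healed or removed. The partition is essentially the following (a sketch; the dict shapes and the host test are assumptions):

    def audit_allocations(allocations, instances_by_uuid):
        # allocations: uuid -> {'resources': {...}} as reported by placement.
        for uuid, alloc in allocations.items():
            inst = instances_by_uuid.get(uuid)
            if inst is None:
                # Instance gone from this host: candidate for removal.
                yield ('remove', uuid, alloc)
            elif inst['host'] is None:
                # Scheduled here but not yet started: skip heal, keep as-is.
                continue
            else:
                yield ('keep', uuid, alloc)

Only the actively managed instances feed the usage totals reported just after this: ten allocations of {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1} line up with used_vcpus=10 and used_disk=10GB, and with used_ram=1792MB once the 512 MB reserved in inventory is added.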
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1329.355399] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1329.355552] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1329.375213] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Refreshing inventories for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1329.391276] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Updating ProviderTree inventory for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1329.391564] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Updating inventory in ProviderTree for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1329.405077] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Refreshing aggregate associations for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44, aggregates: None {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1329.430161] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Refreshing trait associations for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1329.801068] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ed2641-2860-414a-966d-0dd49151eebd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.809744] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5fb5c943-2d4c-43d8-abce-6b2248dcc05c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.839775] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f47687b5-c2ae-4905-99be-757c794fbfcd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.848650] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005a0d74-9dac-4736-a187-c730e07afd4a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.862698] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1329.875226] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1329.895010] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1329.895010] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.780s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1330.855652] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1330.855652] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1331.556525] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b569d8ef-a337-4d3d-8463-ef8abdd9daf1 tempest-ListServersNegativeTestJSON-595429082 tempest-ListServersNegativeTestJSON-595429082-project-member] Acquiring lock "6541b54b-214d-432c-8ae6-5de4ed99390f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.556741] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b569d8ef-a337-4d3d-8463-ef8abdd9daf1 tempest-ListServersNegativeTestJSON-595429082 
tempest-ListServersNegativeTestJSON-595429082-project-member] Lock "6541b54b-214d-432c-8ae6-5de4ed99390f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.579935] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b569d8ef-a337-4d3d-8463-ef8abdd9daf1 tempest-ListServersNegativeTestJSON-595429082 tempest-ListServersNegativeTestJSON-595429082-project-member] Acquiring lock "d8f18a76-588b-4329-a167-2a571f82455f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.580225] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b569d8ef-a337-4d3d-8463-ef8abdd9daf1 tempest-ListServersNegativeTestJSON-595429082 tempest-ListServersNegativeTestJSON-595429082-project-member] Lock "d8f18a76-588b-4329-a167-2a571f82455f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.612359] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b569d8ef-a337-4d3d-8463-ef8abdd9daf1 tempest-ListServersNegativeTestJSON-595429082 tempest-ListServersNegativeTestJSON-595429082-project-member] Acquiring lock "285249df-f5b4-4a68-89fe-9281fe1573e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.612608] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b569d8ef-a337-4d3d-8463-ef8abdd9daf1 tempest-ListServersNegativeTestJSON-595429082 tempest-ListServersNegativeTestJSON-595429082-project-member] Lock "285249df-f5b4-4a68-89fe-9281fe1573e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.983707] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1332.979738] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1334.979763] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1335.984150] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1335.984483] env[68040]: DEBUG nova.compute.manager [None 
req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1343.495707] env[68040]: WARNING oslo_vmware.rw_handles [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1343.495707] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1343.495707] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1343.495707] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1343.495707] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1343.495707] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 1343.495707] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1343.495707] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1343.495707] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1343.495707] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1343.495707] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1343.495707] env[68040]: ERROR oslo_vmware.rw_handles [ 1343.496215] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/89195c32-3ebf-4f1c-853f-5a96dd3f415d/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1343.498121] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1343.498391] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Copying Virtual Disk [datastore2] vmware_temp/89195c32-3ebf-4f1c-853f-5a96dd3f415d/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/89195c32-3ebf-4f1c-853f-5a96dd3f415d/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1343.498678] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d8f1981-6129-42c1-9bc1-7111ed8c6f8a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.506962] env[68040]: DEBUG oslo_vmware.api [None 
req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Waiting for the task: (returnval){ [ 1343.506962] env[68040]: value = "task-3200273" [ 1343.506962] env[68040]: _type = "Task" [ 1343.506962] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.515171] env[68040]: DEBUG oslo_vmware.api [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Task: {'id': task-3200273, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.020353] env[68040]: DEBUG oslo_vmware.exceptions [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1344.020655] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1344.021228] env[68040]: ERROR nova.compute.manager [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1344.021228] env[68040]: Faults: ['InvalidArgument'] [ 1344.021228] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Traceback (most recent call last): [ 1344.021228] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1344.021228] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] yield resources [ 1344.021228] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1344.021228] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] self.driver.spawn(context, instance, image_meta, [ 1344.021228] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1344.021228] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1344.021228] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1344.021228] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] self._fetch_image_if_missing(context, vi) [ 1344.021228] env[68040]: ERROR nova.compute.manager [instance: 
de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1344.021690] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] image_cache(vi, tmp_image_ds_loc) [ 1344.021690] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1344.021690] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] vm_util.copy_virtual_disk( [ 1344.021690] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1344.021690] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] session._wait_for_task(vmdk_copy_task) [ 1344.021690] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1344.021690] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] return self.wait_for_task(task_ref) [ 1344.021690] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1344.021690] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] return evt.wait() [ 1344.021690] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1344.021690] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] result = hub.switch() [ 1344.021690] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1344.021690] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] return self.greenlet.switch() [ 1344.022126] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1344.022126] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] self.f(*self.args, **self.kw) [ 1344.022126] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1344.022126] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] raise exceptions.translate_fault(task_info.error) [ 1344.022126] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1344.022126] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Faults: ['InvalidArgument'] [ 1344.022126] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] [ 1344.022126] env[68040]: INFO nova.compute.manager [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 
tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Terminating instance [ 1344.023190] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.023452] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1344.023954] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Acquiring lock "refresh_cache-de1b8ef9-0088-4d2a-985e-d04fcff55d31" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.024147] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Acquired lock "refresh_cache-de1b8ef9-0088-4d2a-985e-d04fcff55d31" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.024319] env[68040]: DEBUG nova.network.neutron [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1344.025417] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d0a0f5f5-2737-4741-8875-6a566389225d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.036413] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1344.036641] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1344.037697] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f4f8d00-c54c-432c-a2bb-4708c5df6075 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.042739] env[68040]: DEBUG oslo_vmware.api [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Waiting for the task: (returnval){ [ 1344.042739] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52933ad3-a712-0d69-57ab-8423445d035e" [ 1344.042739] env[68040]: _type = "Task" [ 1344.042739] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.050895] env[68040]: DEBUG oslo_vmware.api [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52933ad3-a712-0d69-57ab-8423445d035e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.055169] env[68040]: DEBUG nova.network.neutron [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1344.204473] env[68040]: DEBUG nova.network.neutron [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1344.214862] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Releasing lock "refresh_cache-de1b8ef9-0088-4d2a-985e-d04fcff55d31" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1344.215355] env[68040]: DEBUG nova.compute.manager [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Start destroying the instance on the hypervisor. 
{{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1344.215559] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1344.216703] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88efdcc-a03c-47e0-95b9-9b94d922ea32 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.229515] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1344.229819] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-acf76954-cd29-47dc-bb51-99d61a4ea392 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.266112] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1344.266471] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1344.266581] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Deleting the datastore file [datastore2] de1b8ef9-0088-4d2a-985e-d04fcff55d31 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1344.266875] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e938e158-2e6b-4a63-a3c4-3500b14a8971 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.275113] env[68040]: DEBUG oslo_vmware.api [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Waiting for the task: (returnval){ [ 1344.275113] env[68040]: value = "task-3200275" [ 1344.275113] env[68040]: _type = "Task" [ 1344.275113] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.286371] env[68040]: DEBUG oslo_vmware.api [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Task: {'id': task-3200275, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.553082] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1344.553368] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Creating directory with path [datastore2] vmware_temp/2d2d21fc-f8fc-4552-a6eb-d37be35d5d66/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1344.553597] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e3f8599-ef58-4a10-92c4-834454dab6b7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.566133] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Created directory with path [datastore2] vmware_temp/2d2d21fc-f8fc-4552-a6eb-d37be35d5d66/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1344.566330] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Fetch image to [datastore2] vmware_temp/2d2d21fc-f8fc-4552-a6eb-d37be35d5d66/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1344.566499] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/2d2d21fc-f8fc-4552-a6eb-d37be35d5d66/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1344.567738] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1080d11d-09c1-4d70-90d7-32002ed8742d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.574403] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5cfea07-4641-4bf1-85ce-011e39d797ec {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.583522] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a362ff-f051-4deb-9580-04ea5c54f971 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.614871] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d18aa71-4529-49f8-9a6b-a025694ba1d4 {{(pid=68040) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.620788] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-65f6d462-927f-4c2b-a530-e6ac69d6e560 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.643502] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1344.695182] env[68040]: DEBUG oslo_vmware.rw_handles [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2d2d21fc-f8fc-4552-a6eb-d37be35d5d66/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1344.759613] env[68040]: DEBUG oslo_vmware.rw_handles [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1344.759995] env[68040]: DEBUG oslo_vmware.rw_handles [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2d2d21fc-f8fc-4552-a6eb-d37be35d5d66/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1344.788821] env[68040]: DEBUG oslo_vmware.api [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Task: {'id': task-3200275, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.039307} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.788821] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1344.788821] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1344.788821] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1344.788821] env[68040]: INFO nova.compute.manager [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Took 0.57 seconds to destroy the instance on the hypervisor. [ 1344.789023] env[68040]: DEBUG oslo.service.loopingcall [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1344.789023] env[68040]: DEBUG nova.compute.manager [-] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Skipping network deallocation for instance since networking was not requested.
{{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1344.789552] env[68040]: DEBUG nova.compute.claims [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1344.789839] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.790171] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.154549] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159ead8a-cc5d-4bb8-86f9-b7114a35c8e5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.162106] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec8ee47-e8a3-4154-895e-e971d3bbffc9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.191202] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60523610-ff37-4e5d-ad70-5ff1e66e0a02 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.198360] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63653c49-925a-4d3f-a89b-1439796faf55 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.212594] env[68040]: DEBUG nova.compute.provider_tree [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1345.221640] env[68040]: DEBUG nova.scheduler.client.report [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1345.243234] env[68040]: 
DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.453s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.243784] env[68040]: ERROR nova.compute.manager [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1345.243784] env[68040]: Faults: ['InvalidArgument'] [ 1345.243784] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Traceback (most recent call last): [ 1345.243784] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1345.243784] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] self.driver.spawn(context, instance, image_meta, [ 1345.243784] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1345.243784] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1345.243784] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1345.243784] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] self._fetch_image_if_missing(context, vi) [ 1345.243784] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1345.243784] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] image_cache(vi, tmp_image_ds_loc) [ 1345.243784] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1345.244309] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] vm_util.copy_virtual_disk( [ 1345.244309] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1345.244309] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] session._wait_for_task(vmdk_copy_task) [ 1345.244309] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1345.244309] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] return self.wait_for_task(task_ref) [ 1345.244309] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1345.244309] env[68040]: ERROR 
nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] return evt.wait() [ 1345.244309] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1345.244309] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] result = hub.switch() [ 1345.244309] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1345.244309] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] return self.greenlet.switch() [ 1345.244309] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1345.244309] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] self.f(*self.args, **self.kw) [ 1345.244638] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1345.244638] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] raise exceptions.translate_fault(task_info.error) [ 1345.244638] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1345.244638] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Faults: ['InvalidArgument'] [ 1345.244638] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] [ 1345.244638] env[68040]: DEBUG nova.compute.utils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1345.246150] env[68040]: DEBUG nova.compute.manager [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Build of instance de1b8ef9-0088-4d2a-985e-d04fcff55d31 was re-scheduled: A specified parameter was not correct: fileType [ 1345.246150] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1345.246549] env[68040]: DEBUG nova.compute.manager [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1345.246777] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Acquiring lock "refresh_cache-de1b8ef9-0088-4d2a-985e-d04fcff55d31" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.246928] env[68040]: DEBUG 
oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Acquired lock "refresh_cache-de1b8ef9-0088-4d2a-985e-d04fcff55d31" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.247135] env[68040]: DEBUG nova.network.neutron [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1345.275678] env[68040]: DEBUG nova.network.neutron [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1345.400240] env[68040]: DEBUG nova.network.neutron [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.409124] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Releasing lock "refresh_cache-de1b8ef9-0088-4d2a-985e-d04fcff55d31" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.409440] env[68040]: DEBUG nova.compute.manager [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1345.409626] env[68040]: DEBUG nova.compute.manager [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1345.505757] env[68040]: INFO nova.scheduler.client.report [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Deleted allocations for instance de1b8ef9-0088-4d2a-985e-d04fcff55d31 [ 1345.529344] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24c7d6e7-6e41-4ded-8212-50d1527298ec tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Lock "de1b8ef9-0088-4d2a-985e-d04fcff55d31" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 643.706s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.530565] env[68040]: DEBUG oslo_concurrency.lockutils [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Lock "de1b8ef9-0088-4d2a-985e-d04fcff55d31" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 445.172s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.530783] env[68040]: DEBUG oslo_concurrency.lockutils [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Acquiring lock "de1b8ef9-0088-4d2a-985e-d04fcff55d31-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1345.530993] env[68040]: DEBUG oslo_concurrency.lockutils [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Lock "de1b8ef9-0088-4d2a-985e-d04fcff55d31-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.531167] env[68040]: DEBUG oslo_concurrency.lockutils [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Lock "de1b8ef9-0088-4d2a-985e-d04fcff55d31-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.533114] env[68040]: INFO nova.compute.manager [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Terminating instance [ 1345.535038] env[68040]: DEBUG oslo_concurrency.lockutils [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Acquiring lock "refresh_cache-de1b8ef9-0088-4d2a-985e-d04fcff55d31" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.535202] env[68040]: DEBUG oslo_concurrency.lockutils [None req-73f9695a-d179-44c6-ab90-76938bbaa53b
tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Acquired lock "refresh_cache-de1b8ef9-0088-4d2a-985e-d04fcff55d31" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.535374] env[68040]: DEBUG nova.network.neutron [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1345.543970] env[68040]: DEBUG nova.compute.manager [None req-ffa334c1-46ca-4321-8ef4-84666042e7cc tempest-ServerMetadataNegativeTestJSON-942408065 tempest-ServerMetadataNegativeTestJSON-942408065-project-member] [instance: 310e3ad1-aa4c-44d1-b1e9-152d1de39125] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1345.562329] env[68040]: DEBUG nova.network.neutron [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1345.566804] env[68040]: DEBUG nova.compute.manager [None req-ffa334c1-46ca-4321-8ef4-84666042e7cc tempest-ServerMetadataNegativeTestJSON-942408065 tempest-ServerMetadataNegativeTestJSON-942408065-project-member] [instance: 310e3ad1-aa4c-44d1-b1e9-152d1de39125] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1345.594727] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ffa334c1-46ca-4321-8ef4-84666042e7cc tempest-ServerMetadataNegativeTestJSON-942408065 tempest-ServerMetadataNegativeTestJSON-942408065-project-member] Lock "310e3ad1-aa4c-44d1-b1e9-152d1de39125" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 230.958s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.604295] env[68040]: DEBUG nova.compute.manager [None req-4fff48aa-07ba-4031-bce1-a66c45599b83 tempest-ServerGroupTestJSON-1986318540 tempest-ServerGroupTestJSON-1986318540-project-member] [instance: 72f5f3ba-c931-40a5-ab73-4e6738e0aaba] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1345.630069] env[68040]: DEBUG nova.compute.manager [None req-4fff48aa-07ba-4031-bce1-a66c45599b83 tempest-ServerGroupTestJSON-1986318540 tempest-ServerGroupTestJSON-1986318540-project-member] [instance: 72f5f3ba-c931-40a5-ab73-4e6738e0aaba] Instance disappeared before build.
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1345.631381] env[68040]: DEBUG nova.network.neutron [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.639020] env[68040]: DEBUG oslo_concurrency.lockutils [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Releasing lock "refresh_cache-de1b8ef9-0088-4d2a-985e-d04fcff55d31" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.639299] env[68040]: DEBUG nova.compute.manager [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1345.639534] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1345.640236] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-308f67e6-b8e0-4381-8c47-1392ba02514d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.653585] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b0f426-37ce-4552-995b-717f4ac4533b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.666442] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4fff48aa-07ba-4031-bce1-a66c45599b83 tempest-ServerGroupTestJSON-1986318540 tempest-ServerGroupTestJSON-1986318540-project-member] Lock "72f5f3ba-c931-40a5-ab73-4e6738e0aaba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 229.161s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.687023] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance de1b8ef9-0088-4d2a-985e-d04fcff55d31 could not be found.
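
Annotation: the oslo_concurrency.lockutils records in this stretch all follow one pattern, an 'Acquiring lock "X" by "f"' line, a 'Lock "X" acquired by "f" :: waited Ns' line, and a 'Lock "X" "released" by "f" :: held Ns' line, emitted by the wrapper reported as "inner ... lockutils.py". The sketch below is a minimal stand-in for that pattern, not the real lockutils implementation; the decorator name, the module-level lock registry, and the timing details are illustrative only:

    import logging
    import threading
    import time

    LOG = logging.getLogger(__name__)

    _LOCKS: dict[str, threading.Lock] = {}  # stand-in for lockutils' internal registry

    def synchronized(name):
        """Illustrative stand-in for the decorator behind the records above."""
        lock = _LOCKS.setdefault(name, threading.Lock())

        def decorator(f):
            def inner(*args, **kwargs):
                caller = f"{f.__module__}.{f.__qualname__}"
                LOG.debug('Acquiring lock "%s" by "%s"', name, caller)
                start = time.monotonic()
                with lock:
                    # "waited" is the time spent blocked on other holders.
                    LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                              name, caller, time.monotonic() - start)
                    held_from = time.monotonic()
                    try:
                        return f(*args, **kwargs)
                    finally:
                        # "held" is how long the critical section ran.
                        LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                                  name, caller, time.monotonic() - held_from)
            return inner
        return decorator

This also explains the large timings above: terminate_instance's do_terminate_instance waited 445.172s on the per-instance lock "de1b8ef9-0088-4d2a-985e-d04fcff55d31" because _locked_do_build_and_run_instance held that same lock for 643.706s while the build failed and was cleaned up.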
[ 1345.687302] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1345.687509] env[68040]: INFO nova.compute.manager [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1345.687838] env[68040]: DEBUG oslo.service.loopingcall [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1345.688195] env[68040]: DEBUG nova.compute.manager [None req-a0d7a4c2-38ad-452c-b7f1-ca0c8bc2ec0f tempest-ServersTestBootFromVolume-1340526347 tempest-ServersTestBootFromVolume-1340526347-project-member] [instance: c2f603dd-6a9b-4a0f-b50a-263cf8eb70af] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1345.695028] env[68040]: DEBUG nova.compute.manager [-] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1345.695028] env[68040]: DEBUG nova.network.neutron [-] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1345.718478] env[68040]: DEBUG nova.compute.manager [None req-a0d7a4c2-38ad-452c-b7f1-ca0c8bc2ec0f tempest-ServersTestBootFromVolume-1340526347 tempest-ServersTestBootFromVolume-1340526347-project-member] [instance: c2f603dd-6a9b-4a0f-b50a-263cf8eb70af] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1345.742477] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a0d7a4c2-38ad-452c-b7f1-ca0c8bc2ec0f tempest-ServersTestBootFromVolume-1340526347 tempest-ServersTestBootFromVolume-1340526347-project-member] Lock "c2f603dd-6a9b-4a0f-b50a-263cf8eb70af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 225.800s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.753417] env[68040]: DEBUG nova.compute.manager [None req-844fd553-d870-4df2-bd91-7f10aa5f2aa3 tempest-InstanceActionsV221TestJSON-1568005960 tempest-InstanceActionsV221TestJSON-1568005960-project-member] [instance: ec66dda9-4e56-4baa-b8aa-8b01f28d8e9c] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1345.782163] env[68040]: DEBUG nova.compute.manager [None req-844fd553-d870-4df2-bd91-7f10aa5f2aa3 tempest-InstanceActionsV221TestJSON-1568005960 tempest-InstanceActionsV221TestJSON-1568005960-project-member] [instance: ec66dda9-4e56-4baa-b8aa-8b01f28d8e9c] Instance disappeared before build.
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1345.807919] env[68040]: DEBUG oslo_concurrency.lockutils [None req-844fd553-d870-4df2-bd91-7f10aa5f2aa3 tempest-InstanceActionsV221TestJSON-1568005960 tempest-InstanceActionsV221TestJSON-1568005960-project-member] Lock "ec66dda9-4e56-4baa-b8aa-8b01f28d8e9c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 222.007s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.817849] env[68040]: DEBUG nova.compute.manager [None req-5cf588d9-4f1f-4813-9b5c-123759522daa tempest-ServersTestMultiNic-1524601141 tempest-ServersTestMultiNic-1524601141-project-member] [instance: 8535d103-7bdf-4210-aa1e-180bb100de5f] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1345.835441] env[68040]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68040) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1345.835580] env[68040]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1345.836306] env[68040]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
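
Annotation: the 401 sequence just above is the key failure of this stretch. neutronclient raises Unauthorized, the call wrapper in nova/network/neutron.py (lines 196 and 212 in the traceback that follows) logs the "not able to generate a valid admin token" error and re-raises it as NeutronAdminCredentialConfigurationInvalid, and that is what the retrying looping call surfaces. A minimal sketch of the translation step, using stand-in exception classes rather than the real neutronclient and nova.exception ones:

    import logging

    LOG = logging.getLogger(__name__)

    class Unauthorized(Exception):
        """Stand-in for neutronclient.common.exceptions.Unauthorized."""

    class NeutronAdminCredentialConfigurationInvalid(Exception):
        """Stand-in for the nova.exception class named in the records above."""

    def translate_unauthorized(f):
        # Sketch of the wrapper pattern the traceback below walks through:
        # every client call is proxied, and a 401 from the admin client is
        # re-raised as a Nova configuration error.
        def wrapper(*args, **kwargs):
            try:
                return f(*args, **kwargs)
            except Unauthorized:
                LOG.error("Neutron client was not able to generate a valid "
                          "admin token, please verify Neutron admin "
                          "credential located in nova.conf")
                raise NeutronAdminCredentialConfigurationInvalid()
        return wrapper

    @translate_unauthorized
    def list_ports(**search_opts):
        # Simulates the neutron.list_ports(**search_opts) call that failed.
        raise Unauthorized("401: The request you have made requires authentication.")

Calling list_ports() here raises the translated configuration error, which matches the two chained tracebacks that follow.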
[ 1345.836306] env[68040]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1345.836306] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1345.836306] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1345.836306] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1345.836306] env[68040]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1345.836306] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1345.836306] env[68040]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1345.836306] env[68040]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1345.836306] env[68040]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-4007a6f1-a9c8-4ec9-8e31-50e1fe775a48'] [ 1345.836306] env[68040]: ERROR oslo.service.loopingcall [ 1345.836306] env[68040]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1345.836306] env[68040]: ERROR oslo.service.loopingcall [ 1345.836306] env[68040]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1345.836306] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1345.836306] env[68040]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1345.836728] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1345.836728] env[68040]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1345.836728] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1345.836728] env[68040]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1345.836728] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1345.836728] env[68040]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1345.836728] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1345.836728] env[68040]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1345.836728] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1345.836728] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1345.836728] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1345.836728] env[68040]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1345.836728] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1345.836728] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1345.836728] env[68040]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1345.836728] env[68040]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1345.836728] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1345.836728] env[68040]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1345.837235] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1345.837235] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1345.837235] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1345.837235] env[68040]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1345.837235] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1345.837235] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1345.837235] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1345.837235] env[68040]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1345.837235] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1345.837235] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1345.837235] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1345.837235] env[68040]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1345.837235] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1345.837235] env[68040]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1345.837235] env[68040]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1345.837235] env[68040]: ERROR oslo.service.loopingcall [ 1345.837626] env[68040]: ERROR nova.compute.manager [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1345.847860] env[68040]: DEBUG nova.compute.manager [None req-5cf588d9-4f1f-4813-9b5c-123759522daa tempest-ServersTestMultiNic-1524601141 tempest-ServersTestMultiNic-1524601141-project-member] [instance: 8535d103-7bdf-4210-aa1e-180bb100de5f] Instance disappeared before build. 
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1345.867631] env[68040]: DEBUG oslo_concurrency.lockutils [None req-5cf588d9-4f1f-4813-9b5c-123759522daa tempest-ServersTestMultiNic-1524601141 tempest-ServersTestMultiNic-1524601141-project-member] Lock "8535d103-7bdf-4210-aa1e-180bb100de5f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 218.201s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.881757] env[68040]: ERROR nova.compute.manager [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1345.881757] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Traceback (most recent call last): [ 1345.881757] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1345.881757] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] ret = obj(*args, **kwargs) [ 1345.881757] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1345.881757] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] exception_handler_v20(status_code, error_body) [ 1345.881757] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1345.881757] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] raise client_exc(message=error_message, [ 1345.881757] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1345.881757] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Neutron server returns request_ids: ['req-4007a6f1-a9c8-4ec9-8e31-50e1fe775a48'] [ 1345.881757] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] [ 1345.882097] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] During handling of the above exception, another exception occurred: [ 1345.882097] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] [ 1345.882097] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Traceback (most recent call last): [ 1345.882097] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1345.882097] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] self._delete_instance(context, instance, bdms) [ 1345.882097] env[68040]: ERROR nova.compute.manager [instance:
de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1345.882097] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] self._shutdown_instance(context, instance, bdms) [ 1345.882097] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1345.882097] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] self._try_deallocate_network(context, instance, requested_networks) [ 1345.882097] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1345.882097] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] with excutils.save_and_reraise_exception(): [ 1345.882097] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1345.882097] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] self.force_reraise() [ 1345.882399] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1345.882399] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] raise self.value [ 1345.882399] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1345.882399] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] _deallocate_network_with_retries() [ 1345.882399] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1345.882399] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] return evt.wait() [ 1345.882399] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1345.882399] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] result = hub.switch() [ 1345.882399] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1345.882399] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] return self.greenlet.switch() [ 1345.882399] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1345.882399] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] result = func(*self.args, **self.kw) [ 1345.882668] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1345.882668] env[68040]: ERROR nova.compute.manager 
[instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] result = f(*args, **kwargs) [ 1345.882668] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1345.882668] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] self._deallocate_network( [ 1345.882668] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1345.882668] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] self.network_api.deallocate_for_instance( [ 1345.882668] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1345.882668] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] data = neutron.list_ports(**search_opts) [ 1345.882668] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1345.882668] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] ret = obj(*args, **kwargs) [ 1345.882668] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1345.882668] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] return self.list('ports', self.ports_path, retrieve_all, [ 1345.882668] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1345.882991] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] ret = obj(*args, **kwargs) [ 1345.882991] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1345.882991] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] for r in self._pagination(collection, path, **params): [ 1345.882991] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1345.882991] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] res = self.get(path, params=params) [ 1345.882991] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1345.882991] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] ret = obj(*args, **kwargs) [ 1345.882991] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1345.882991] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] return self.retry_request("GET", action, body=body, [ 1345.882991] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File 
"/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1345.882991] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] ret = obj(*args, **kwargs) [ 1345.882991] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1345.882991] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] return self.do_request(method, action, body=body, [ 1345.883320] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1345.883320] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] ret = obj(*args, **kwargs) [ 1345.883320] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1345.883320] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] self._handle_fault_response(status_code, replybody, resp) [ 1345.883320] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1345.883320] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1345.883320] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1345.883320] env[68040]: ERROR nova.compute.manager [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] [ 1345.884193] env[68040]: DEBUG nova.compute.manager [None req-3059098d-38cc-47b4-a221-522a20c890e9 tempest-InstanceActionsNegativeTestJSON-1151606230 tempest-InstanceActionsNegativeTestJSON-1151606230-project-member] [instance: 462c8f10-1dda-4687-946c-fb40c3e4f049] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1345.907508] env[68040]: DEBUG oslo_concurrency.lockutils [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Lock "de1b8ef9-0088-4d2a-985e-d04fcff55d31" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.377s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.908495] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "de1b8ef9-0088-4d2a-985e-d04fcff55d31" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 134.720s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.908677] env[68040]: INFO nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1345.908853] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "de1b8ef9-0088-4d2a-985e-d04fcff55d31" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.911834] env[68040]: DEBUG nova.compute.manager [None req-3059098d-38cc-47b4-a221-522a20c890e9 tempest-InstanceActionsNegativeTestJSON-1151606230 tempest-InstanceActionsNegativeTestJSON-1151606230-project-member] [instance: 462c8f10-1dda-4687-946c-fb40c3e4f049] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1345.933547] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3059098d-38cc-47b4-a221-522a20c890e9 tempest-InstanceActionsNegativeTestJSON-1151606230 tempest-InstanceActionsNegativeTestJSON-1151606230-project-member] Lock "462c8f10-1dda-4687-946c-fb40c3e4f049" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 199.358s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1345.946326] env[68040]: DEBUG nova.compute.manager [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1345.952725] env[68040]: DEBUG oslo_concurrency.lockutils [None req-951941ec-b7a2-45a6-a027-60dc5719924c tempest-ServersListShow296Test-218947232 tempest-ServersListShow296Test-218947232-project-member] Acquiring lock "d17db434-040f-4859-913e-bfd658be14b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1345.952952] env[68040]: DEBUG oslo_concurrency.lockutils [None req-951941ec-b7a2-45a6-a027-60dc5719924c tempest-ServersListShow296Test-218947232 tempest-ServersListShow296Test-218947232-project-member] Lock "d17db434-040f-4859-913e-bfd658be14b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.974707] env[68040]: INFO nova.compute.manager [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] [instance: de1b8ef9-0088-4d2a-985e-d04fcff55d31] Successfully reverted task state from None on failure for instance. [ 1345.978166] env[68040]: ERROR oslo_messaging.rpc.server [None req-73f9695a-d179-44c6-ab90-76938bbaa53b tempest-ServerDiagnosticsV248Test-1374976813 tempest-ServerDiagnosticsV248Test-1374976813-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1345.978166] env[68040]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1345.978166] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1345.978166] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1345.978166] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1345.978166] env[68040]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1345.978166] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1345.978166] env[68040]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1345.978166] env[68040]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1345.978166] env[68040]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-4007a6f1-a9c8-4ec9-8e31-50e1fe775a48'] [ 1345.978166] env[68040]: ERROR oslo_messaging.rpc.server [ 1345.978166] env[68040]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1345.978166] env[68040]: ERROR oslo_messaging.rpc.server [ 1345.978166] env[68040]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1345.978166] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1345.978510] env[68040]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1345.978510] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1345.978510] env[68040]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1345.978510] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1345.978510] env[68040]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1345.978510] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1345.978510] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1345.978510] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1345.978510] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1345.978510] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1345.978510] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1345.978510] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1345.978510] env[68040]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1345.978510] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1345.978510] env[68040]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1345.978510] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1345.978510] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1345.978510] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1345.978917] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1345.978917] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1345.978917] env[68040]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1345.978917] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1345.978917] env[68040]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1345.978917] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1345.978917] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1345.978917] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1345.978917] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1345.978917] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1345.978917] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1345.978917] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1345.978917] env[68040]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1345.978917] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1345.978917] env[68040]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1345.978917] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1345.978917] env[68040]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1345.978917] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1345.979442] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1345.979442] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1345.979442] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1345.979442] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1345.979442] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1345.979442] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1345.979442] env[68040]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1345.979442] env[68040]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1345.979442] env[68040]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1345.979442] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1345.979442] env[68040]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1345.979442] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1345.979442] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1345.979442] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1345.979442] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1345.979442] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1345.979442] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1345.979442] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1345.979950] env[68040]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1345.979950] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1345.979950] env[68040]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1345.979950] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1345.979950] env[68040]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1345.979950] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1345.979950] env[68040]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1345.979950] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1345.979950] env[68040]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1345.979950] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1345.979950] env[68040]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1345.979950] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1345.979950] env[68040]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1345.979950] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1345.979950] env[68040]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1345.979950] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1345.979950] env[68040]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1345.979950] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1345.980388] env[68040]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1345.980388] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1345.980388] env[68040]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1345.980388] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1345.980388] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1345.980388] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1345.980388] env[68040]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1345.980388] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1345.980388] env[68040]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1345.980388] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1345.980388] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1345.980388] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1345.980388] env[68040]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1345.980388] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1345.980388] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1345.980388] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1345.980388] env[68040]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1345.980388] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1345.980797] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1345.980797] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1345.980797] env[68040]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1345.980797] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1345.980797] env[68040]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1345.980797] env[68040]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
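The same failure has now been reported three times, by oslo.service.loopingcall, nova.compute.manager, and oslo_messaging.rpc.server, and in each traceback the root cause is identical: Neutron answered 401, neutronclient raised Unauthorized, and the wrapper at nova/network/neutron.py line 212 re-raised it as NeutronAdminCredentialConfigurationInvalid, since a 401 against Nova's own service credentials points at configuration rather than at the end user. A minimal sketch of that translation pattern, with stand-in exception classes rather than Nova's actual source:

import functools

class Unauthorized(Exception):
    """Stand-in for neutronclient.common.exceptions.Unauthorized."""

class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Stand-in for the nova.exception class raised in the traceback."""

def translate_unauthorized(func):
    # Wrap a Neutron client call so an HTTP 401 surfaces as a
    # configuration error instead of leaking the client exception.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Unauthorized:
            raise NeutronAdminCredentialConfigurationInvalid()
    return wrapper

@translate_unauthorized
def list_ports(**search_opts):
    # Simulate the 401 the log shows Neutron returning.
    raise Unauthorized('401: The request you have made requires authentication.')

try:
    list_ports(device_id='de1b8ef9-0088-4d2a-985e-d04fcff55d31')
except NeutronAdminCredentialConfigurationInvalid:
    print('translated the 401 into a configuration error')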
[ 1345.980797] env[68040]: ERROR oslo_messaging.rpc.server [ 1346.007291] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.007563] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.009699] env[68040]: INFO nova.compute.claims [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1346.336247] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be4e48c-7909-47d6-abb2-82eb08b294d6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.344039] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16069518-68c7-4dd6-b9ea-96df5ad198c2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.374533] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a75ce0cc-4292-4e9f-bb12-e731ffa1adfa {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.381601] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cdb42b4-8372-433c-b67b-6e4373bf42aa {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.394398] env[68040]: DEBUG nova.compute.provider_tree [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1346.403362] env[68040]: DEBUG nova.scheduler.client.report [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1346.411527] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8891cb67-ad0f-454e-8827-0cc9f582daed 
tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquiring lock "e54d02e5-1e98-4e9d-93e7-bcccfa3307e1" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.416526] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.409s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1346.416981] env[68040]: DEBUG nova.compute.manager [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1346.446309] env[68040]: DEBUG nova.compute.claims [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1346.446500] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.446721] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.733508] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cb84b7-010c-425f-a37f-a4f4c79fe30d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.741145] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc892c8-cf6d-4e75-8a3a-7d4ce1163493 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.772195] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25591370-5416-4ae8-a8a4-8daedc856d07 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.779437] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f97d933-6e98-4d2a-b04a-88e5192abd93 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.792908] env[68040]: DEBUG nova.compute.provider_tree [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 
tempest-AttachVolumeTestJSON-1788101522-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1346.801871] env[68040]: DEBUG nova.scheduler.client.report [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1346.815389] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.369s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1346.816062] env[68040]: DEBUG nova.compute.utils [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Conflict updating instance e54d02e5-1e98-4e9d-93e7-bcccfa3307e1. Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'} {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1346.817425] env[68040]: DEBUG nova.compute.manager [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Instance disappeared during build. 
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2504}} [ 1346.817600] env[68040]: DEBUG nova.compute.manager [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1346.817818] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquiring lock "refresh_cache-e54d02e5-1e98-4e9d-93e7-bcccfa3307e1" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.817962] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquired lock "refresh_cache-e54d02e5-1e98-4e9d-93e7-bcccfa3307e1" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.818137] env[68040]: DEBUG nova.network.neutron [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1346.842104] env[68040]: DEBUG nova.network.neutron [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1346.972151] env[68040]: DEBUG nova.network.neutron [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1346.981737] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Releasing lock "refresh_cache-e54d02e5-1e98-4e9d-93e7-bcccfa3307e1" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.981955] env[68040]: DEBUG nova.compute.manager [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1346.982172] env[68040]: DEBUG nova.compute.manager [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1346.982349] env[68040]: DEBUG nova.network.neutron [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1346.998579] env[68040]: DEBUG nova.network.neutron [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1347.006682] env[68040]: DEBUG nova.network.neutron [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1347.014257] env[68040]: INFO nova.compute.manager [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Took 0.03 seconds to deallocate network for instance. 
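The "Inventory has not changed" records earlier in this stretch dump the provider's inventory per resource class. Placement derives schedulable capacity as (total - reserved) * allocation_ratio, while max_unit caps what a single allocation may claim. Working that arithmetic for the figures in this log:

# Inventory as reported for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0, 'max_unit': 125},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity {capacity:g}, per-allocation cap {inv['max_unit']}")

# VCPU: (48 - 0) * 4.0 = 192 schedulable units in total, but no single
# allocation may claim more than 16 of them (max_unit).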
[ 1347.083014] env[68040]: INFO nova.scheduler.client.report [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Deleted allocations for instance e54d02e5-1e98-4e9d-93e7-bcccfa3307e1 [ 1347.083301] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4dd95105-af05-40ac-9d1e-c06ea3a4394f tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Lock "e54d02e5-1e98-4e9d-93e7-bcccfa3307e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 196.859s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1347.084343] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8891cb67-ad0f-454e-8827-0cc9f582daed tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Lock "e54d02e5-1e98-4e9d-93e7-bcccfa3307e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.673s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.084563] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8891cb67-ad0f-454e-8827-0cc9f582daed tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquiring lock "e54d02e5-1e98-4e9d-93e7-bcccfa3307e1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1347.084776] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8891cb67-ad0f-454e-8827-0cc9f582daed tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Lock "e54d02e5-1e98-4e9d-93e7-bcccfa3307e1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.084929] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8891cb67-ad0f-454e-8827-0cc9f582daed tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Lock "e54d02e5-1e98-4e9d-93e7-bcccfa3307e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1347.086922] env[68040]: INFO nova.compute.manager [None req-8891cb67-ad0f-454e-8827-0cc9f582daed tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Terminating instance [ 1347.088459] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8891cb67-ad0f-454e-8827-0cc9f582daed tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquiring lock "refresh_cache-e54d02e5-1e98-4e9d-93e7-bcccfa3307e1" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1347.088622] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8891cb67-ad0f-454e-8827-0cc9f582daed tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquired lock "refresh_cache-e54d02e5-1e98-4e9d-93e7-bcccfa3307e1" {{(pid=68040) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.088792] env[68040]: DEBUG nova.network.neutron [None req-8891cb67-ad0f-454e-8827-0cc9f582daed tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1347.091856] env[68040]: DEBUG nova.compute.manager [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1347.111683] env[68040]: DEBUG nova.network.neutron [None req-8891cb67-ad0f-454e-8827-0cc9f582daed tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1347.137996] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1347.138250] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.139663] env[68040]: INFO nova.compute.claims [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1347.239428] env[68040]: DEBUG nova.network.neutron [None req-8891cb67-ad0f-454e-8827-0cc9f582daed tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1347.249727] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8891cb67-ad0f-454e-8827-0cc9f582daed tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Releasing lock "refresh_cache-e54d02e5-1e98-4e9d-93e7-bcccfa3307e1" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1347.250234] env[68040]: DEBUG nova.compute.manager [None req-8891cb67-ad0f-454e-8827-0cc9f582daed tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Start destroying the instance on the hypervisor. 
{{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1347.250429] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-8891cb67-ad0f-454e-8827-0cc9f582daed tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1347.250952] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cc7ad7f1-1449-4310-942a-66bff54ecc11 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.262304] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b04b64-d3e5-44fc-a1c0-a23e2c0df16b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.294520] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-8891cb67-ad0f-454e-8827-0cc9f582daed tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e54d02e5-1e98-4e9d-93e7-bcccfa3307e1 could not be found. [ 1347.294724] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-8891cb67-ad0f-454e-8827-0cc9f582daed tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1347.294902] env[68040]: INFO nova.compute.manager [None req-8891cb67-ad0f-454e-8827-0cc9f582daed tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1347.295161] env[68040]: DEBUG oslo.service.loopingcall [None req-8891cb67-ad0f-454e-8827-0cc9f582daed tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1347.297640] env[68040]: DEBUG nova.compute.manager [-] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1347.297731] env[68040]: DEBUG nova.network.neutron [-] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1347.314443] env[68040]: DEBUG nova.network.neutron [-] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1347.323920] env[68040]: DEBUG nova.network.neutron [-] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1347.336144] env[68040]: INFO nova.compute.manager [-] [instance: e54d02e5-1e98-4e9d-93e7-bcccfa3307e1] Took 0.04 seconds to deallocate network for instance. 
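The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return" record is oslo.service's looping-call machinery: network deallocation is wrapped in a retry decorator so transient Neutron errors are retried with growing sleeps before the exception is allowed to propagate, as it ultimately did for instance de1b8ef9 earlier in this log. A minimal sketch of that wrapper with illustrative parameters, not Nova's exact values:

from oslo_service import loopingcall

class TransientNetworkError(Exception):
    """Stand-in for the Neutron errors treated as retryable."""

@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                            max_sleep_time=30,
                            exceptions=(TransientNetworkError,))
def deallocate_with_retries():
    # Re-invoked up to max_retry_count times, sleeping longer between
    # attempts; exceptions not listed above escape immediately.
    pass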
[ 1347.448767] env[68040]: DEBUG oslo_concurrency.lockutils [None req-8891cb67-ad0f-454e-8827-0cc9f582daed tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Lock "e54d02e5-1e98-4e9d-93e7-bcccfa3307e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.364s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1347.486043] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8da93cf-3bfa-4a5a-b391-b78e1a0c9b69 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.494513] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586140ab-9ee2-41fe-85d8-9bad95880ccb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.524610] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c3cd92a-aa34-4c0c-98cf-63b37b24ba00 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.532195] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c4456f8-996d-4dc0-bf61-7e2f284cf159 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.546716] env[68040]: DEBUG nova.compute.provider_tree [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1347.555751] env[68040]: DEBUG nova.scheduler.client.report [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1347.571157] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.433s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1347.571695] env[68040]: DEBUG nova.compute.manager [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Start building networks asynchronously for instance. 
{{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1347.607351] env[68040]: DEBUG nova.compute.utils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1347.612021] env[68040]: DEBUG nova.compute.manager [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1347.612021] env[68040]: DEBUG nova.network.neutron [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1347.620101] env[68040]: DEBUG nova.compute.manager [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1347.684217] env[68040]: DEBUG nova.compute.manager [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Start spawning the instance on the hypervisor. 
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1347.709515] env[68040]: DEBUG nova.virt.hardware [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1347.709769] env[68040]: DEBUG nova.virt.hardware [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1347.709944] env[68040]: DEBUG nova.virt.hardware [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1347.710142] env[68040]: DEBUG nova.virt.hardware [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1347.710381] env[68040]: DEBUG nova.virt.hardware [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1347.710543] env[68040]: DEBUG nova.virt.hardware [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1347.710752] env[68040]: DEBUG nova.virt.hardware [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1347.710914] env[68040]: DEBUG nova.virt.hardware [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1347.715029] env[68040]: DEBUG nova.virt.hardware [None 
req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1347.715029] env[68040]: DEBUG nova.virt.hardware [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1347.715029] env[68040]: DEBUG nova.virt.hardware [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1347.715029] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4bfa6d-aa00-496f-a8fb-98679759561c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.720383] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb37595-2394-4149-bd0d-f14847f82ddf {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.737328] env[68040]: DEBUG nova.policy [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd5b784bb2384457e9bcc4e9ff02ea850', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9a2c3ee9bf1c40228a089e4b0e5bff00', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 1348.119128] env[68040]: DEBUG nova.network.neutron [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Successfully created port: fdbb1a32-ab98-4c32-96b4-06eacc1bc62c {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1353.190869] env[68040]: DEBUG nova.network.neutron [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Successfully updated port: fdbb1a32-ab98-4c32-96b4-06eacc1bc62c {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1353.202706] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "refresh_cache-4a08d3e3-5e84-4f34-b418-2c18eadbef25" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1353.203634] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 
tempest-DeleteServersTestJSON-1950766552-project-member] Acquired lock "refresh_cache-4a08d3e3-5e84-4f34-b418-2c18eadbef25" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.203817] env[68040]: DEBUG nova.network.neutron [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1353.286625] env[68040]: DEBUG nova.network.neutron [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1353.368833] env[68040]: DEBUG nova.compute.manager [req-85a7c5fd-e927-4cc1-9a7c-3483734de9a3 req-2ebe2246-12e2-41b6-bfc4-7ccbae2fb9dd service nova] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Received event network-vif-plugged-fdbb1a32-ab98-4c32-96b4-06eacc1bc62c {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1353.369091] env[68040]: DEBUG oslo_concurrency.lockutils [req-85a7c5fd-e927-4cc1-9a7c-3483734de9a3 req-2ebe2246-12e2-41b6-bfc4-7ccbae2fb9dd service nova] Acquiring lock "4a08d3e3-5e84-4f34-b418-2c18eadbef25-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.369333] env[68040]: DEBUG oslo_concurrency.lockutils [req-85a7c5fd-e927-4cc1-9a7c-3483734de9a3 req-2ebe2246-12e2-41b6-bfc4-7ccbae2fb9dd service nova] Lock "4a08d3e3-5e84-4f34-b418-2c18eadbef25-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.369512] env[68040]: DEBUG oslo_concurrency.lockutils [req-85a7c5fd-e927-4cc1-9a7c-3483734de9a3 req-2ebe2246-12e2-41b6-bfc4-7ccbae2fb9dd service nova] Lock "4a08d3e3-5e84-4f34-b418-2c18eadbef25-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.369686] env[68040]: DEBUG nova.compute.manager [req-85a7c5fd-e927-4cc1-9a7c-3483734de9a3 req-2ebe2246-12e2-41b6-bfc4-7ccbae2fb9dd service nova] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] No waiting events found dispatching network-vif-plugged-fdbb1a32-ab98-4c32-96b4-06eacc1bc62c {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1353.369855] env[68040]: WARNING nova.compute.manager [req-85a7c5fd-e927-4cc1-9a7c-3483734de9a3 req-2ebe2246-12e2-41b6-bfc4-7ccbae2fb9dd service nova] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Received unexpected event network-vif-plugged-fdbb1a32-ab98-4c32-96b4-06eacc1bc62c for instance with vm_state building and task_state spawning. 
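
The WARNING at 1353.369855 is a benign event-ordering race: Neutron delivered network-vif-plugged-fdbb1a32 while instance 4a08d3e3 was still in vm_state building, before the spawn path had registered a waiter for that event, so the pop found no waiting events and the handler logged the event as unexpected rather than dispatching it. The sketch below is a deliberately simplified, hypothetical reconstruction of that pop-and-signal pattern using threading.Event; the names (InstanceEvents, pop_instance_event, external_instance_event) mirror the log entries above but this is not Nova's actual implementation.

# Illustrative sketch only: a waiter registry keyed by (instance, event name).
# The spawn side registers a waiter; the external-event side pops and signals it.
# If the event arrives first, the pop returns nothing and we log "unexpected",
# which is exactly the sequence recorded above.
import threading
from collections import defaultdict

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()      # stands in for the "<uuid>-events" lock in the log
        self._waiters = defaultdict(dict)  # instance_uuid -> {event_name: threading.Event}

    def prepare_for_event(self, instance_uuid, event_name):
        """Called by the spawning side before it starts waiting."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        """Remove and return the waiter for an event, or None if nobody is waiting."""
        with self._lock:
            return self._waiters[instance_uuid].pop(event_name, None)

def external_instance_event(events, instance_uuid, event_name):
    """Called when the network service reports an event for an instance."""
    waiter = events.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Mirrors the WARNING above: no waiting events found dispatching.
        print(f"Received unexpected event {event_name} for instance {instance_uuid}")
    else:
        waiter.set()

if __name__ == "__main__":
    events = InstanceEvents()
    uuid = "4a08d3e3-5e84-4f34-b418-2c18eadbef25"
    # Event arrives before anyone waits -> "unexpected event" path, as in the log.
    external_instance_event(events, uuid, "network-vif-plugged-fdbb1a32")
    # Normal path: register first, then the event releases the waiter.
    waiter = events.prepare_for_event(uuid, "network-vif-plugged-fdbb1a32")
    external_instance_event(events, uuid, "network-vif-plugged-fdbb1a32")
    assert waiter.is_set()
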
[ 1353.612104] env[68040]: DEBUG nova.network.neutron [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Updating instance_info_cache with network_info: [{"id": "fdbb1a32-ab98-4c32-96b4-06eacc1bc62c", "address": "fa:16:3e:56:bb:bb", "network": {"id": "9565e3df-4a40-4611-a5a9-efd2bc66053b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-780365588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a2c3ee9bf1c40228a089e4b0e5bff00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdbb1a32-ab", "ovs_interfaceid": "fdbb1a32-ab98-4c32-96b4-06eacc1bc62c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1353.622627] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Releasing lock "refresh_cache-4a08d3e3-5e84-4f34-b418-2c18eadbef25" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1353.622904] env[68040]: DEBUG nova.compute.manager [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Instance network_info: |[{"id": "fdbb1a32-ab98-4c32-96b4-06eacc1bc62c", "address": "fa:16:3e:56:bb:bb", "network": {"id": "9565e3df-4a40-4611-a5a9-efd2bc66053b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-780365588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a2c3ee9bf1c40228a089e4b0e5bff00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdbb1a32-ab", "ovs_interfaceid": "fdbb1a32-ab98-4c32-96b4-06eacc1bc62c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1353.623311] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None 
req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:bb:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fdbb1a32-ab98-4c32-96b4-06eacc1bc62c', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1353.630755] env[68040]: DEBUG oslo.service.loopingcall [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1353.631389] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1353.631617] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e48fc1f-3326-4655-b842-f52ecbb0e391 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.651304] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1353.651304] env[68040]: value = "task-3200276" [ 1353.651304] env[68040]: _type = "Task" [ 1353.651304] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.658714] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200276, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.163029] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200276, 'name': CreateVM_Task} progress is 25%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.661462] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200276, 'name': CreateVM_Task} progress is 25%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.162072] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200276, 'name': CreateVM_Task, 'duration_secs': 1.4595} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.162205] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1355.162867] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1355.163051] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1355.163386] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1355.163624] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4d696f3-239e-4dbc-aa36-9d88ce9ab33c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.167942] env[68040]: DEBUG oslo_vmware.api [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for the task: (returnval){ [ 1355.167942] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52ab7671-3804-177b-3206-ee564a516fc4" [ 1355.167942] env[68040]: _type = "Task" [ 1355.167942] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.175166] env[68040]: DEBUG oslo_vmware.api [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52ab7671-3804-177b-3206-ee564a516fc4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.440723] env[68040]: DEBUG nova.compute.manager [req-3db9028c-c497-4348-8f7f-a20473499dc3 req-f4bc808b-a62d-4e39-afac-9828143674a8 service nova] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Received event network-changed-fdbb1a32-ab98-4c32-96b4-06eacc1bc62c {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1355.440922] env[68040]: DEBUG nova.compute.manager [req-3db9028c-c497-4348-8f7f-a20473499dc3 req-f4bc808b-a62d-4e39-afac-9828143674a8 service nova] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Refreshing instance network info cache due to event network-changed-fdbb1a32-ab98-4c32-96b4-06eacc1bc62c. 
{{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1355.441160] env[68040]: DEBUG oslo_concurrency.lockutils [req-3db9028c-c497-4348-8f7f-a20473499dc3 req-f4bc808b-a62d-4e39-afac-9828143674a8 service nova] Acquiring lock "refresh_cache-4a08d3e3-5e84-4f34-b418-2c18eadbef25" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1355.441301] env[68040]: DEBUG oslo_concurrency.lockutils [req-3db9028c-c497-4348-8f7f-a20473499dc3 req-f4bc808b-a62d-4e39-afac-9828143674a8 service nova] Acquired lock "refresh_cache-4a08d3e3-5e84-4f34-b418-2c18eadbef25" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1355.441483] env[68040]: DEBUG nova.network.neutron [req-3db9028c-c497-4348-8f7f-a20473499dc3 req-f4bc808b-a62d-4e39-afac-9828143674a8 service nova] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Refreshing network info cache for port fdbb1a32-ab98-4c32-96b4-06eacc1bc62c {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1355.678604] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1355.678604] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1355.678604] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1355.796933] env[68040]: DEBUG nova.network.neutron [req-3db9028c-c497-4348-8f7f-a20473499dc3 req-f4bc808b-a62d-4e39-afac-9828143674a8 service nova] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Updated VIF entry in instance network info cache for port fdbb1a32-ab98-4c32-96b4-06eacc1bc62c. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1355.797614] env[68040]: DEBUG nova.network.neutron [req-3db9028c-c497-4348-8f7f-a20473499dc3 req-f4bc808b-a62d-4e39-afac-9828143674a8 service nova] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Updating instance_info_cache with network_info: [{"id": "fdbb1a32-ab98-4c32-96b4-06eacc1bc62c", "address": "fa:16:3e:56:bb:bb", "network": {"id": "9565e3df-4a40-4611-a5a9-efd2bc66053b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-780365588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a2c3ee9bf1c40228a089e4b0e5bff00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdbb1a32-ab", "ovs_interfaceid": "fdbb1a32-ab98-4c32-96b4-06eacc1bc62c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1355.808150] env[68040]: DEBUG oslo_concurrency.lockutils [req-3db9028c-c497-4348-8f7f-a20473499dc3 req-f4bc808b-a62d-4e39-afac-9828143674a8 service nova] Releasing lock "refresh_cache-4a08d3e3-5e84-4f34-b418-2c18eadbef25" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1387.988230] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.983607] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.996095] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1388.996375] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1388.996513] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.996674] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1388.997851] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf3ea35-c7f8-44e4-9766-f40c01da273a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.006605] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d22dfc83-2c04-4b4d-8eab-3d2c0f92c610 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.020609] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a101c6-f48b-44b2-8189-1e739ff02503 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.026847] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a759f2-75b4-42cf-839a-1fb2a902da3c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.054740] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180971MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1389.054879] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.055079] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.129028] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance bce68a2b-260c-45cc-ac98-d4b01b4513a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1389.129028] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 1e43f6be-f6a3-4569-adea-c82a5d709247 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1389.129028] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3738de32-79cd-4b04-8081-cc1146730c75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1389.129028] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1389.129241] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1389.129241] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e92b662c-b458-49d8-ac2a-00ae6046a11b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1389.129241] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 57cd94c2-aec3-427e-9b9f-a444fe291974 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1389.129241] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 87a7851e-d6fe-481a-8abb-5732e281cb64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1389.129376] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c581d685-7ea0-41f8-b911-ff1dce1b46c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1389.129376] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4a08d3e3-5e84-4f34-b418-2c18eadbef25 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1389.139640] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 268b5613-b132-49ed-a45b-bc88132177cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1389.149858] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4dfa01f8-53a0-4ee4-9b00-93017144ea0b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1389.158691] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4ce0934f-8277-4029-8a0c-77468ee9b6dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1389.167682] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 09489d57-c6c1-4ac2-9c14-1a190172970c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1389.176569] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b5def543-2cbf-4ecc-b492-3607e5e74e38 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1389.185283] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1389.194261] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f9d3a788-4f1b-46f7-83ab-dd6884f68d2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1389.203270] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 83475c46-38de-4918-91b4-b53dcf3ead77 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1389.212189] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 6541b54b-214d-432c-8ae6-5de4ed99390f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1389.221145] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d8f18a76-588b-4329-a167-2a571f82455f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1389.229967] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 285249df-f5b4-4a68-89fe-9281fe1573e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1389.238439] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d17db434-040f-4859-913e-bfd658be14b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1389.238666] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1389.238815] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1389.483264] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-280e8d5e-22a0-44ee-9e21-0947a52f280d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.491091] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28bb777e-cf09-48af-80c7-65b553a9a680 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.522092] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e3e952d-89d3-43b1-8c7a-ab82de63aa2e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.529564] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b05167-28fc-433d-8cf5-f824da0bb3db {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.542378] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1389.551968] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1389.569329] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1389.569504] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.514s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.570396] env[68040]: DEBUG oslo_service.periodic_task [None 
req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1390.984347] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1390.984602] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1390.984738] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1391.005455] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1391.005617] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1391.005741] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1391.005868] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1391.005990] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1391.006129] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1391.006251] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1391.006369] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Skipping network cache update for instance because it is Building. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1391.006487] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1391.006603] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1391.006728] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1391.007247] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1391.007454] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1391.984426] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1393.979421] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.776692] env[68040]: WARNING oslo_vmware.rw_handles [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1394.776692] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1394.776692] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1394.776692] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1394.776692] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1394.776692] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 1394.776692] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1394.776692] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1394.776692] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1394.776692] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1394.776692] env[68040]: ERROR oslo_vmware.rw_handles 
http.client.RemoteDisconnected: Remote end closed connection without response [ 1394.776692] env[68040]: ERROR oslo_vmware.rw_handles [ 1394.777166] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/2d2d21fc-f8fc-4552-a6eb-d37be35d5d66/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1394.779065] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1394.779325] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Copying Virtual Disk [datastore2] vmware_temp/2d2d21fc-f8fc-4552-a6eb-d37be35d5d66/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/2d2d21fc-f8fc-4552-a6eb-d37be35d5d66/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1394.779739] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb548122-55f7-4d62-91e2-9a7b9fcbdbb6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.788363] env[68040]: DEBUG oslo_vmware.api [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Waiting for the task: (returnval){ [ 1394.788363] env[68040]: value = "task-3200277" [ 1394.788363] env[68040]: _type = "Task" [ 1394.788363] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.795976] env[68040]: DEBUG oslo_vmware.api [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Task: {'id': task-3200277, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.299070] env[68040]: DEBUG oslo_vmware.exceptions [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Fault InvalidArgument not matched. 
{{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1395.299412] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1395.299912] env[68040]: ERROR nova.compute.manager [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1395.299912] env[68040]: Faults: ['InvalidArgument'] [ 1395.299912] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Traceback (most recent call last): [ 1395.299912] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1395.299912] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] yield resources [ 1395.299912] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1395.299912] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] self.driver.spawn(context, instance, image_meta, [ 1395.299912] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1395.299912] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1395.299912] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1395.299912] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] self._fetch_image_if_missing(context, vi) [ 1395.299912] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1395.300282] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] image_cache(vi, tmp_image_ds_loc) [ 1395.300282] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1395.300282] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] vm_util.copy_virtual_disk( [ 1395.300282] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1395.300282] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] session._wait_for_task(vmdk_copy_task) [ 1395.300282] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1395.300282] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] return self.wait_for_task(task_ref) [ 1395.300282] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1395.300282] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] return evt.wait() [ 1395.300282] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1395.300282] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] result = hub.switch() [ 1395.300282] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1395.300282] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] return self.greenlet.switch() [ 1395.300668] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1395.300668] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] self.f(*self.args, **self.kw) [ 1395.300668] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1395.300668] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] raise exceptions.translate_fault(task_info.error) [ 1395.300668] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1395.300668] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Faults: ['InvalidArgument'] [ 1395.300668] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] [ 1395.300668] env[68040]: INFO nova.compute.manager [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Terminating instance [ 1395.301761] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.301991] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1395.302240] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a575c3b6-a57a-4d0d-a5ea-83e8943b35ba {{(pid=68040) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.304289] env[68040]: DEBUG nova.compute.manager [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1395.304483] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1395.305208] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-307b305f-6a26-4823-9d9c-b33329cc87aa {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.311532] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1395.311757] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-658f5199-672f-4b6d-a8f4-30282d9fa590 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.313882] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1395.314101] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1395.315031] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-888dbf51-83b2-426e-a3be-7e3c438f2e4c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.319797] env[68040]: DEBUG oslo_vmware.api [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for the task: (returnval){ [ 1395.319797] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]521a4058-2d6d-2094-89f4-766ca2bb43b3" [ 1395.319797] env[68040]: _type = "Task" [ 1395.319797] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.326966] env[68040]: DEBUG oslo_vmware.api [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]521a4058-2d6d-2094-89f4-766ca2bb43b3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.386942] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1395.387186] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1395.387436] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Deleting the datastore file [datastore2] bce68a2b-260c-45cc-ac98-d4b01b4513a4 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1395.387685] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c601a502-9798-4eeb-a9d3-683da8176930 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.393402] env[68040]: DEBUG oslo_vmware.api [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Waiting for the task: (returnval){ [ 1395.393402] env[68040]: value = "task-3200279" [ 1395.393402] env[68040]: _type = "Task" [ 1395.393402] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.401943] env[68040]: DEBUG oslo_vmware.api [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Task: {'id': task-3200279, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.829794] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1395.830085] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Creating directory with path [datastore2] vmware_temp/607a8c0b-87ab-4f84-bab8-4b52c9049eee/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1395.830321] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e57c374c-ac2f-454d-a8aa-01667e4af415 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.841191] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Created directory with path [datastore2] vmware_temp/607a8c0b-87ab-4f84-bab8-4b52c9049eee/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1395.841390] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Fetch image to [datastore2] vmware_temp/607a8c0b-87ab-4f84-bab8-4b52c9049eee/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1395.841560] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/607a8c0b-87ab-4f84-bab8-4b52c9049eee/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1395.842352] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4c58d9-5e08-49dd-92ce-52e5fbd2ce90 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.849123] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445d53da-9f1b-404f-9b67-239fee32e181 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.857867] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ace8e7-9b1b-45c7-a6d3-365bda7d4deb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.887841] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca00680-3513-4824-aa61-32abfb448393 
{{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.893148] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-968a7572-3923-4267-967d-10d2294b6d2a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.901668] env[68040]: DEBUG oslo_vmware.api [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Task: {'id': task-3200279, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073012} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.901891] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1395.902084] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1395.902258] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1395.902440] env[68040]: INFO nova.compute.manager [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Took 0.60 seconds to destroy the instance on the hypervisor. 
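[editor's note] The DeleteDatastoreFile_Task entries above follow the same lifecycle as every vCenter call in this log: invoke the API, get back a Task, then poll it at a fixed interval until it reports success or error ("progress is 0%." ... "completed successfully"). A minimal, self-contained sketch of that polling loop follows; get_task_info and the dict shape are hypothetical stand-ins for illustration, not the oslo.vmware API.

import time

def wait_for_task(get_task_info, task_ref, interval=0.5):
    # Poll the task at a fixed interval, mirroring the _poll_task
    # loop the log shows running inside a looping call.
    while True:
        info = get_task_info(task_ref)
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            # oslo.vmware translates the fault and raises it here,
            # which is how the VimFaultException above surfaced.
            raise RuntimeError(info["error"])
        time.sleep(interval)

# Toy usage: a task that succeeds on the third poll.
states = iter([{"state": "running"}, {"state": "running"},
               {"state": "success", "result": "done"}])
print(wait_for_task(lambda ref: next(states), "task-3200279", interval=0))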
[ 1395.904537] env[68040]: DEBUG nova.compute.claims [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1395.904706] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.904918] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.915092] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1395.967631] env[68040]: DEBUG oslo_vmware.rw_handles [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/607a8c0b-87ab-4f84-bab8-4b52c9049eee/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1396.023898] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1396.024129] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1396.029084] env[68040]: DEBUG oslo_vmware.rw_handles [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1396.029267] env[68040]: DEBUG oslo_vmware.rw_handles [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/607a8c0b-87ab-4f84-bab8-4b52c9049eee/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1396.246443] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951ac8a6-7809-4f91-992a-20bcad8fe438 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.254515] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1a5c9b-f2c5-4f51-8d8d-b2602b4ced8e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.284218] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c1da094-721d-4f07-8e77-b0c8f98f33da {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.291136] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e59a5490-e706-4aa7-a904-666a2b2467df {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.304074] env[68040]: DEBUG nova.compute.provider_tree [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1396.314102] env[68040]: DEBUG nova.scheduler.client.report [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1396.327890] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.423s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.328425] env[68040]: ERROR nova.compute.manager [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1396.328425] env[68040]: Faults: ['InvalidArgument'] [ 1396.328425] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Traceback (most recent call last): [ 1396.328425] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1396.328425] env[68040]: ERROR 
nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] self.driver.spawn(context, instance, image_meta, [ 1396.328425] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1396.328425] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1396.328425] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1396.328425] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] self._fetch_image_if_missing(context, vi) [ 1396.328425] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1396.328425] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] image_cache(vi, tmp_image_ds_loc) [ 1396.328425] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1396.328762] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] vm_util.copy_virtual_disk( [ 1396.328762] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1396.328762] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] session._wait_for_task(vmdk_copy_task) [ 1396.328762] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1396.328762] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] return self.wait_for_task(task_ref) [ 1396.328762] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1396.328762] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] return evt.wait() [ 1396.328762] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1396.328762] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] result = hub.switch() [ 1396.328762] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1396.328762] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] return self.greenlet.switch() [ 1396.328762] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1396.328762] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] self.f(*self.args, **self.kw) [ 1396.329112] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1396.329112] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] raise exceptions.translate_fault(task_info.error) [ 1396.329112] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1396.329112] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Faults: ['InvalidArgument'] [ 1396.329112] env[68040]: ERROR nova.compute.manager [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] [ 1396.329236] env[68040]: DEBUG nova.compute.utils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1396.330616] env[68040]: DEBUG nova.compute.manager [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Build of instance bce68a2b-260c-45cc-ac98-d4b01b4513a4 was re-scheduled: A specified parameter was not correct: fileType [ 1396.330616] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1396.330974] env[68040]: DEBUG nova.compute.manager [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1396.331167] env[68040]: DEBUG nova.compute.manager [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1396.331339] env[68040]: DEBUG nova.compute.manager [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1396.331504] env[68040]: DEBUG nova.network.neutron [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1396.993665] env[68040]: DEBUG nova.network.neutron [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.009800] env[68040]: INFO nova.compute.manager [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Took 0.67 seconds to deallocate network for instance. [ 1397.116528] env[68040]: INFO nova.scheduler.client.report [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Deleted allocations for instance bce68a2b-260c-45cc-ac98-d4b01b4513a4 [ 1397.138406] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4d98be3-8744-48a4-bbf5-11c34adc784d tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Lock "bce68a2b-260c-45cc-ac98-d4b01b4513a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 641.004s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.139597] env[68040]: DEBUG oslo_concurrency.lockutils [None req-bc1322f3-5659-4e3d-b876-c8389db71213 tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Lock "bce68a2b-260c-45cc-ac98-d4b01b4513a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 445.119s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.139806] env[68040]: DEBUG oslo_concurrency.lockutils [None req-bc1322f3-5659-4e3d-b876-c8389db71213 tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Acquiring lock "bce68a2b-260c-45cc-ac98-d4b01b4513a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.140064] env[68040]: DEBUG oslo_concurrency.lockutils [None req-bc1322f3-5659-4e3d-b876-c8389db71213 tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Lock "bce68a2b-260c-45cc-ac98-d4b01b4513a4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68040) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.140391] env[68040]: DEBUG oslo_concurrency.lockutils [None req-bc1322f3-5659-4e3d-b876-c8389db71213 tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Lock "bce68a2b-260c-45cc-ac98-d4b01b4513a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.142247] env[68040]: INFO nova.compute.manager [None req-bc1322f3-5659-4e3d-b876-c8389db71213 tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Terminating instance [ 1397.143821] env[68040]: DEBUG nova.compute.manager [None req-bc1322f3-5659-4e3d-b876-c8389db71213 tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1397.144032] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-bc1322f3-5659-4e3d-b876-c8389db71213 tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1397.144489] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a8bb9682-172d-4a19-a440-31b7124a87a3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.149643] env[68040]: DEBUG nova.compute.manager [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1397.156717] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b645d2-49ee-4f5a-97de-f15e63ed75fb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.186392] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-bc1322f3-5659-4e3d-b876-c8389db71213 tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bce68a2b-260c-45cc-ac98-d4b01b4513a4 could not be found. [ 1397.186588] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-bc1322f3-5659-4e3d-b876-c8389db71213 tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1397.186766] env[68040]: INFO nova.compute.manager [None req-bc1322f3-5659-4e3d-b876-c8389db71213 tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Took 0.04 seconds to destroy the instance on the hypervisor. 
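[editor's note] The lockutils entries around the terminate above show a consistent nesting: a long-lived per-instance lock named by the instance UUID guards do_terminate_instance, and a short-lived "<uuid>-events" lock is taken inside it to clear pending instance events. A small sketch of that nesting, assuming oslo.concurrency is installed; the function body is a placeholder, not Nova's terminate path.

from oslo_concurrency import lockutils

def do_terminate_instance(instance_uuid):
    # Outer per-instance lock: matches 'Lock "<uuid>" acquired by
    # ...do_terminate_instance' in the log.
    with lockutils.lock(instance_uuid):
        # Inner "<uuid>-events" lock: matches the acquire/release
        # pair held for 0.000s by clear_events_for_instance.
        with lockutils.lock(f"{instance_uuid}-events"):
            pending_events = []  # placeholder: collect/clear events
        # ... destroy on the hypervisor, then deallocate network ...
        return pending_events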
[ 1397.187062] env[68040]: DEBUG oslo.service.loopingcall [None req-bc1322f3-5659-4e3d-b876-c8389db71213 tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1397.189237] env[68040]: DEBUG nova.compute.manager [-] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1397.189342] env[68040]: DEBUG nova.network.neutron [-] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1397.203704] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.203771] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.205116] env[68040]: INFO nova.compute.claims [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1397.215014] env[68040]: DEBUG nova.network.neutron [-] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.228631] env[68040]: INFO nova.compute.manager [-] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] Took 0.04 seconds to deallocate network for instance. 
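[editor's note] The oslo.service.loopingcall entry above waits on _deallocate_network_with_retries, i.e. the network teardown is wrapped in a bounded retry driven by a looping call. The generic shape of such a *_with_retries helper is sketched below; this is an illustrative pattern under that assumption, not Nova's actual implementation.

import time

def call_with_retries(fn, attempts=3, delay=1.0):
    # Retry the call a bounded number of times, re-raising the
    # last failure once the attempts are exhausted.
    for attempt in range(1, attempts + 1):
        try:
            return fn()
        except Exception:
            if attempt == attempts:
                raise
            time.sleep(delay)

# Toy usage: fails twice, then succeeds.
calls = {"n": 0}
def flaky():
    calls["n"] += 1
    if calls["n"] < 3:
        raise ConnectionError("neutron unavailable")
    return "deallocated"
print(call_with_retries(flaky, attempts=5, delay=0))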
[ 1397.312982] env[68040]: DEBUG oslo_concurrency.lockutils [None req-bc1322f3-5659-4e3d-b876-c8389db71213 tempest-AttachVolumeTestJSON-1788101522 tempest-AttachVolumeTestJSON-1788101522-project-member] Lock "bce68a2b-260c-45cc-ac98-d4b01b4513a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.173s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.313852] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "bce68a2b-260c-45cc-ac98-d4b01b4513a4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 186.125s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.314047] env[68040]: INFO nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: bce68a2b-260c-45cc-ac98-d4b01b4513a4] During sync_power_state the instance has a pending task (deleting). Skip. [ 1397.314234] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "bce68a2b-260c-45cc-ac98-d4b01b4513a4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.479810] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9bdda4-9249-4d63-98a2-c6257c0b32fc {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.487093] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a9a558-96b7-413e-b49d-f3fa4ae499d2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.515756] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5512eefa-a2ee-44d7-9432-a7c401873576 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.522442] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e12530ee-3563-4e0d-b69b-6749e991a720 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.534828] env[68040]: DEBUG nova.compute.provider_tree [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1397.543733] env[68040]: DEBUG nova.scheduler.client.report [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1397.556823] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.353s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.557290] env[68040]: DEBUG nova.compute.manager [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1397.589188] env[68040]: DEBUG nova.compute.utils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1397.590621] env[68040]: DEBUG nova.compute.manager [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1397.590814] env[68040]: DEBUG nova.network.neutron [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1397.598120] env[68040]: DEBUG nova.compute.manager [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Start building block device mappings for instance. 
{{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1397.652090] env[68040]: DEBUG nova.policy [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd80e102965474f1eaeb720c9ccd32240', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '56cc75f0a13948a983b1eabe7887b5f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 1397.658429] env[68040]: DEBUG nova.compute.manager [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Start spawning the instance on the hypervisor. {{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1397.683462] env[68040]: DEBUG nova.virt.hardware [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1397.683698] env[68040]: DEBUG nova.virt.hardware [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1397.683862] env[68040]: DEBUG nova.virt.hardware [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1397.684057] env[68040]: DEBUG nova.virt.hardware [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1397.684212] env[68040]: DEBUG nova.virt.hardware [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1397.684358] env[68040]: DEBUG nova.virt.hardware [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1397.684564] env[68040]: DEBUG nova.virt.hardware [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1397.684727] env[68040]: DEBUG nova.virt.hardware [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1397.684898] env[68040]: DEBUG nova.virt.hardware [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1397.685075] env[68040]: DEBUG nova.virt.hardware [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1397.685256] env[68040]: DEBUG nova.virt.hardware [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1397.686105] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0221b3-18a4-4396-a7aa-dbe7a908d934 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.694073] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a59cae-c741-456d-8354-9c7a8f49f3b4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.271590] env[68040]: DEBUG nova.network.neutron [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Successfully created port: ea5e86ae-e795-4f6b-bdb3-ea3a1a32fc9b {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1398.345247] env[68040]: DEBUG oslo_concurrency.lockutils [None req-5bde59f3-ebee-405d-8b32-34c2acad2712 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "4a08d3e3-5e84-4f34-b418-2c18eadbef25" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" 
{{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.392279] env[68040]: DEBUG nova.compute.manager [req-fe37869d-fd31-44af-8fb1-9caa99087b6a req-85e9c2a0-add1-44eb-94c2-7e0a449fe7f7 service nova] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Received event network-vif-plugged-ea5e86ae-e795-4f6b-bdb3-ea3a1a32fc9b {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1399.392584] env[68040]: DEBUG oslo_concurrency.lockutils [req-fe37869d-fd31-44af-8fb1-9caa99087b6a req-85e9c2a0-add1-44eb-94c2-7e0a449fe7f7 service nova] Acquiring lock "268b5613-b132-49ed-a45b-bc88132177cf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.392734] env[68040]: DEBUG oslo_concurrency.lockutils [req-fe37869d-fd31-44af-8fb1-9caa99087b6a req-85e9c2a0-add1-44eb-94c2-7e0a449fe7f7 service nova] Lock "268b5613-b132-49ed-a45b-bc88132177cf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.392914] env[68040]: DEBUG oslo_concurrency.lockutils [req-fe37869d-fd31-44af-8fb1-9caa99087b6a req-85e9c2a0-add1-44eb-94c2-7e0a449fe7f7 service nova] Lock "268b5613-b132-49ed-a45b-bc88132177cf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.393161] env[68040]: DEBUG nova.compute.manager [req-fe37869d-fd31-44af-8fb1-9caa99087b6a req-85e9c2a0-add1-44eb-94c2-7e0a449fe7f7 service nova] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] No waiting events found dispatching network-vif-plugged-ea5e86ae-e795-4f6b-bdb3-ea3a1a32fc9b {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1399.393392] env[68040]: WARNING nova.compute.manager [req-fe37869d-fd31-44af-8fb1-9caa99087b6a req-85e9c2a0-add1-44eb-94c2-7e0a449fe7f7 service nova] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Received unexpected event network-vif-plugged-ea5e86ae-e795-4f6b-bdb3-ea3a1a32fc9b for instance with vm_state building and task_state spawning. 
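[editor's note] The sequence above shows Neutron's network-vif-plugged event arriving before anyone registered to wait for it: the events lock is taken, no waiter is found ("No waiting events found dispatching..."), and the event is logged as unexpected because the instance is still building. A tiny sketch of that prepare/pop handshake follows; the class and names are hypothetical simplifications of the mechanism, not Nova's InstanceEvents API.

import threading

class InstanceEvents:
    # A waiter registers interest in an event name; when the external
    # event arrives, the matching waiter is popped and woken, or the
    # event is reported as unexpected if nobody was waiting.
    def __init__(self):
        self._waiters = {}
        self._lock = threading.Lock()

    def prepare(self, instance, name):
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance, name)] = ev
        return ev

    def dispatch(self, instance, name):
        with self._lock:
            ev = self._waiters.pop((instance, name), None)
        if ev is None:
            print(f"unexpected event {name} for instance {instance}")
        else:
            ev.set()

# Toy usage: no waiter registered, so the event is "unexpected".
events = InstanceEvents()
events.dispatch("268b5613", "network-vif-plugged")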
[ 1399.538023] env[68040]: DEBUG nova.network.neutron [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Successfully updated port: ea5e86ae-e795-4f6b-bdb3-ea3a1a32fc9b {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1399.545209] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquiring lock "refresh_cache-268b5613-b132-49ed-a45b-bc88132177cf" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1399.545375] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquired lock "refresh_cache-268b5613-b132-49ed-a45b-bc88132177cf" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.545528] env[68040]: DEBUG nova.network.neutron [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1399.592356] env[68040]: DEBUG nova.network.neutron [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Instance cache missing network info. 
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1399.748907] env[68040]: DEBUG nova.network.neutron [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Updating instance_info_cache with network_info: [{"id": "ea5e86ae-e795-4f6b-bdb3-ea3a1a32fc9b", "address": "fa:16:3e:aa:4f:f2", "network": {"id": "8810e692-44f2-40eb-9fbd-0a209065c053", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1843594740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56cc75f0a13948a983b1eabe7887b5f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea5e86ae-e7", "ovs_interfaceid": "ea5e86ae-e795-4f6b-bdb3-ea3a1a32fc9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.759638] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Releasing lock "refresh_cache-268b5613-b132-49ed-a45b-bc88132177cf" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1399.759896] env[68040]: DEBUG nova.compute.manager [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Instance network_info: |[{"id": "ea5e86ae-e795-4f6b-bdb3-ea3a1a32fc9b", "address": "fa:16:3e:aa:4f:f2", "network": {"id": "8810e692-44f2-40eb-9fbd-0a209065c053", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1843594740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56cc75f0a13948a983b1eabe7887b5f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea5e86ae-e7", "ovs_interfaceid": "ea5e86ae-e795-4f6b-bdb3-ea3a1a32fc9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1399.760316] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:4f:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ea5e86ae-e795-4f6b-bdb3-ea3a1a32fc9b', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1399.767856] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Creating folder: Project (56cc75f0a13948a983b1eabe7887b5f8). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1399.768695] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-04365da8-4fc8-467c-b0b0-1ffc543f35a7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.780909] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Created folder: Project (56cc75f0a13948a983b1eabe7887b5f8) in parent group-v639956. [ 1399.780909] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Creating folder: Instances. Parent ref: group-v640036. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1399.780909] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e0aafb30-fcf5-4009-b52e-f12f20e53275 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.789396] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Created folder: Instances in parent group-v640036. [ 1399.789679] env[68040]: DEBUG oslo.service.loopingcall [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1399.789864] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1399.790064] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b84c3c7-faff-4af9-8c35-03301fdea426 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.808782] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1399.808782] env[68040]: value = "task-3200282" [ 1399.808782] env[68040]: _type = "Task" [ 1399.808782] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.816050] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200282, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.320600] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200282, 'name': CreateVM_Task, 'duration_secs': 0.280794} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.320825] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1400.321517] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.321685] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.322009] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1400.322259] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e626036-98e7-4b0b-b6b0-33b6b3cac01f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.326884] env[68040]: DEBUG oslo_vmware.api [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Waiting for the task: (returnval){ [ 1400.326884] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52b1fc7e-0e52-342f-daa9-0f71578cc855" [ 1400.326884] env[68040]: _type = "Task" [ 1400.326884] 
env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.334293] env[68040]: DEBUG oslo_vmware.api [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52b1fc7e-0e52-342f-daa9-0f71578cc855, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.837237] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1400.837627] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1400.837681] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1401.475902] env[68040]: DEBUG nova.compute.manager [req-4f511aff-e379-406b-b9d7-967c4dd845e2 req-564ca742-3aab-4028-9d6f-516cd7247927 service nova] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Received event network-changed-ea5e86ae-e795-4f6b-bdb3-ea3a1a32fc9b {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1401.476113] env[68040]: DEBUG nova.compute.manager [req-4f511aff-e379-406b-b9d7-967c4dd845e2 req-564ca742-3aab-4028-9d6f-516cd7247927 service nova] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Refreshing instance network info cache due to event network-changed-ea5e86ae-e795-4f6b-bdb3-ea3a1a32fc9b. 
{{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1401.476331] env[68040]: DEBUG oslo_concurrency.lockutils [req-4f511aff-e379-406b-b9d7-967c4dd845e2 req-564ca742-3aab-4028-9d6f-516cd7247927 service nova] Acquiring lock "refresh_cache-268b5613-b132-49ed-a45b-bc88132177cf" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1401.476477] env[68040]: DEBUG oslo_concurrency.lockutils [req-4f511aff-e379-406b-b9d7-967c4dd845e2 req-564ca742-3aab-4028-9d6f-516cd7247927 service nova] Acquired lock "refresh_cache-268b5613-b132-49ed-a45b-bc88132177cf" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1401.476638] env[68040]: DEBUG nova.network.neutron [req-4f511aff-e379-406b-b9d7-967c4dd845e2 req-564ca742-3aab-4028-9d6f-516cd7247927 service nova] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Refreshing network info cache for port ea5e86ae-e795-4f6b-bdb3-ea3a1a32fc9b {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1401.768926] env[68040]: DEBUG nova.network.neutron [req-4f511aff-e379-406b-b9d7-967c4dd845e2 req-564ca742-3aab-4028-9d6f-516cd7247927 service nova] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Updated VIF entry in instance network info cache for port ea5e86ae-e795-4f6b-bdb3-ea3a1a32fc9b. {{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1401.769352] env[68040]: DEBUG nova.network.neutron [req-4f511aff-e379-406b-b9d7-967c4dd845e2 req-564ca742-3aab-4028-9d6f-516cd7247927 service nova] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Updating instance_info_cache with network_info: [{"id": "ea5e86ae-e795-4f6b-bdb3-ea3a1a32fc9b", "address": "fa:16:3e:aa:4f:f2", "network": {"id": "8810e692-44f2-40eb-9fbd-0a209065c053", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1843594740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56cc75f0a13948a983b1eabe7887b5f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapea5e86ae-e7", "ovs_interfaceid": "ea5e86ae-e795-4f6b-bdb3-ea3a1a32fc9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.778453] env[68040]: DEBUG oslo_concurrency.lockutils [req-4f511aff-e379-406b-b9d7-967c4dd845e2 req-564ca742-3aab-4028-9d6f-516cd7247927 service nova] Releasing lock "refresh_cache-268b5613-b132-49ed-a45b-bc88132177cf" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1444.197682] env[68040]: WARNING oslo_vmware.rw_handles [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] 
Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1444.197682] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1444.197682] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1444.197682] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1444.197682] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1444.197682] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 1444.197682] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1444.197682] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1444.197682] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1444.197682] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1444.197682] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1444.197682] env[68040]: ERROR oslo_vmware.rw_handles [ 1444.198348] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/607a8c0b-87ab-4f84-bab8-4b52c9049eee/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1444.200282] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1444.200538] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Copying Virtual Disk [datastore2] vmware_temp/607a8c0b-87ab-4f84-bab8-4b52c9049eee/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/607a8c0b-87ab-4f84-bab8-4b52c9049eee/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1444.200859] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc358613-c258-4269-81c7-60b7be20a14c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.208213] env[68040]: DEBUG oslo_vmware.api [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for the task: (returnval){ [ 1444.208213] env[68040]: value = "task-3200283" [ 1444.208213] env[68040]: _type = "Task" [ 1444.208213] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.216275] env[68040]: DEBUG oslo_vmware.api [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Task: {'id': task-3200283, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.718208] env[68040]: DEBUG oslo_vmware.exceptions [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1444.718490] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1444.719048] env[68040]: ERROR nova.compute.manager [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1444.719048] env[68040]: Faults: ['InvalidArgument'] [ 1444.719048] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Traceback (most recent call last): [ 1444.719048] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1444.719048] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] yield resources [ 1444.719048] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1444.719048] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] self.driver.spawn(context, instance, image_meta, [ 1444.719048] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1444.719048] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1444.719048] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1444.719048] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] self._fetch_image_if_missing(context, vi) [ 1444.719048] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1444.719355] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] image_cache(vi, tmp_image_ds_loc) [ 1444.719355] env[68040]: ERROR nova.compute.manager [instance: 
1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1444.719355] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] vm_util.copy_virtual_disk( [ 1444.719355] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1444.719355] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] session._wait_for_task(vmdk_copy_task) [ 1444.719355] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1444.719355] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] return self.wait_for_task(task_ref) [ 1444.719355] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1444.719355] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] return evt.wait() [ 1444.719355] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1444.719355] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] result = hub.switch() [ 1444.719355] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1444.719355] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] return self.greenlet.switch() [ 1444.719849] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1444.719849] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] self.f(*self.args, **self.kw) [ 1444.719849] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1444.719849] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] raise exceptions.translate_fault(task_info.error) [ 1444.719849] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1444.719849] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Faults: ['InvalidArgument'] [ 1444.719849] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] [ 1444.719849] env[68040]: INFO nova.compute.manager [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Terminating instance [ 1444.720951] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1444.721187] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1444.721424] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-920c8a3e-215a-433f-9d94-349079201e61 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.723786] env[68040]: DEBUG nova.compute.manager [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1444.723989] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1444.724715] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0874c3e8-c05f-4c54-820b-357ce276af5c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.731429] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1444.731690] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a1f1bed-38c7-40f3-bea0-eb27cd344285 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.733802] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1444.733983] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1444.734952] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a4c2d6a-3f55-48ac-9eca-1254e24ca455 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.739774] env[68040]: DEBUG oslo_vmware.api [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Waiting for the task: (returnval){ [ 1444.739774] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52519c86-e6ca-d80b-d750-33053c22a599" [ 1444.739774] env[68040]: _type = "Task" [ 1444.739774] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.753466] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1444.753696] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Creating directory with path [datastore2] vmware_temp/864af2cd-a70d-4d6d-924a-afc28973bb58/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1444.753903] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0fae8181-4f7b-4466-babd-d48c39f7bfce {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.775019] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Created directory with path [datastore2] vmware_temp/864af2cd-a70d-4d6d-924a-afc28973bb58/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1444.775019] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Fetch image to [datastore2] vmware_temp/864af2cd-a70d-4d6d-924a-afc28973bb58/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1444.775176] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/864af2cd-a70d-4d6d-924a-afc28973bb58/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1444.775892] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0efe05-659f-4fb2-a9c1-e8840fcbdf3c {{(pid=68040) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.782519] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c8c5c7-f701-4f3c-923c-2f0a18afb88c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.792952] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d56f792b-1ce6-4011-9792-1137463a68ee {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.827013] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e1fff8-aef5-4d36-8a28-1dc07aafd6ae {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.829780] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1444.830096] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1444.830288] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Deleting the datastore file [datastore2] 1e43f6be-f6a3-4569-adea-c82a5d709247 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1444.830516] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72c019c9-0613-4ed4-bf0b-a7f9a2ad1118 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.835833] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-aaf61214-b378-452c-9b1b-e2cfcac2a9e2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.837483] env[68040]: DEBUG oslo_vmware.api [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for the task: (returnval){ [ 1444.837483] env[68040]: value = "task-3200285" [ 1444.837483] env[68040]: _type = "Task" [ 1444.837483] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.844931] env[68040]: DEBUG oslo_vmware.api [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Task: {'id': task-3200285, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.858765] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1444.912338] env[68040]: DEBUG oslo_vmware.rw_handles [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/864af2cd-a70d-4d6d-924a-afc28973bb58/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1444.974716] env[68040]: DEBUG oslo_vmware.rw_handles [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1444.974919] env[68040]: DEBUG oslo_vmware.rw_handles [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/864af2cd-a70d-4d6d-924a-afc28973bb58/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1445.349129] env[68040]: DEBUG oslo_vmware.api [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Task: {'id': task-3200285, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079751} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.349129] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1445.349129] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1445.349129] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1445.349129] env[68040]: INFO nova.compute.manager [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1445.351812] env[68040]: DEBUG nova.compute.claims [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1445.351991] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1445.352237] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.634135] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b6bd4ca-a4c3-42d8-914d-86afc39decd1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.641477] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5227c66f-7ef6-4194-a67d-fed395b22a16 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.672037] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446781b2-81d7-414f-910b-b4b4287560ed {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.678700] env[68040]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53963da1-3ba2-47d8-8e8c-0f6e9a452524 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.691417] env[68040]: DEBUG nova.compute.provider_tree [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1445.699760] env[68040]: DEBUG nova.scheduler.client.report [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1445.716194] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.364s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.716705] env[68040]: ERROR nova.compute.manager [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1445.716705] env[68040]: Faults: ['InvalidArgument'] [ 1445.716705] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Traceback (most recent call last): [ 1445.716705] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1445.716705] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] self.driver.spawn(context, instance, image_meta, [ 1445.716705] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1445.716705] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1445.716705] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1445.716705] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] self._fetch_image_if_missing(context, vi) [ 1445.716705] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1445.716705] 
env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] image_cache(vi, tmp_image_ds_loc) [ 1445.716705] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1445.717011] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] vm_util.copy_virtual_disk( [ 1445.717011] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1445.717011] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] session._wait_for_task(vmdk_copy_task) [ 1445.717011] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1445.717011] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] return self.wait_for_task(task_ref) [ 1445.717011] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1445.717011] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] return evt.wait() [ 1445.717011] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1445.717011] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] result = hub.switch() [ 1445.717011] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1445.717011] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] return self.greenlet.switch() [ 1445.717011] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1445.717011] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] self.f(*self.args, **self.kw) [ 1445.717290] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1445.717290] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] raise exceptions.translate_fault(task_info.error) [ 1445.717290] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1445.717290] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Faults: ['InvalidArgument'] [ 1445.717290] env[68040]: ERROR nova.compute.manager [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] [ 1445.717398] env[68040]: DEBUG nova.compute.utils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] VimFaultException {{(pid=68040) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 1445.719147] env[68040]: DEBUG nova.compute.manager [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Build of instance 1e43f6be-f6a3-4569-adea-c82a5d709247 was re-scheduled: A specified parameter was not correct: fileType [ 1445.719147] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1445.719526] env[68040]: DEBUG nova.compute.manager [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1445.719696] env[68040]: DEBUG nova.compute.manager [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1445.719865] env[68040]: DEBUG nova.compute.manager [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1445.720110] env[68040]: DEBUG nova.network.neutron [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1446.204100] env[68040]: DEBUG nova.network.neutron [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1446.214936] env[68040]: INFO nova.compute.manager [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Took 0.49 seconds to deallocate network for instance.
[ 1446.323341] env[68040]: INFO nova.scheduler.client.report [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Deleted allocations for instance 1e43f6be-f6a3-4569-adea-c82a5d709247 [ 1446.344659] env[68040]: DEBUG oslo_concurrency.lockutils [None req-43be14b2-759a-4447-8db7-dbe56efc98d5 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "1e43f6be-f6a3-4569-adea-c82a5d709247" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 637.048s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1446.346151] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a65d0288-f65a-416c-9b9c-42018bb19f4a tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "1e43f6be-f6a3-4569-adea-c82a5d709247" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 439.913s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1446.346151] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a65d0288-f65a-416c-9b9c-42018bb19f4a tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "1e43f6be-f6a3-4569-adea-c82a5d709247-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1446.346151] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a65d0288-f65a-416c-9b9c-42018bb19f4a tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "1e43f6be-f6a3-4569-adea-c82a5d709247-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1446.346354] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a65d0288-f65a-416c-9b9c-42018bb19f4a tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "1e43f6be-f6a3-4569-adea-c82a5d709247-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1446.348246] env[68040]: INFO nova.compute.manager [None req-a65d0288-f65a-416c-9b9c-42018bb19f4a tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Terminating instance [ 1446.350007] env[68040]: DEBUG nova.compute.manager [None req-a65d0288-f65a-416c-9b9c-42018bb19f4a tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Start destroying the instance on the hypervisor. 
{{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1446.350244] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a65d0288-f65a-416c-9b9c-42018bb19f4a tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1446.350680] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e5a33a9c-1943-48c2-a20a-62805bcb49bb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.356411] env[68040]: DEBUG nova.compute.manager [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1446.362744] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4968339-ebfe-4554-869c-744fc2b31cf6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.392745] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-a65d0288-f65a-416c-9b9c-42018bb19f4a tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1e43f6be-f6a3-4569-adea-c82a5d709247 could not be found. [ 1446.392955] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a65d0288-f65a-416c-9b9c-42018bb19f4a tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1446.393175] env[68040]: INFO nova.compute.manager [None req-a65d0288-f65a-416c-9b9c-42018bb19f4a tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1446.393438] env[68040]: DEBUG oslo.service.loopingcall [None req-a65d0288-f65a-416c-9b9c-42018bb19f4a tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1446.395631] env[68040]: DEBUG nova.compute.manager [-] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1446.395735] env[68040]: DEBUG nova.network.neutron [-] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1446.409996] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1446.409996] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1446.411098] env[68040]: INFO nova.compute.claims [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1446.622796] env[68040]: DEBUG nova.network.neutron [-] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1446.632723] env[68040]: INFO nova.compute.manager [-] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] Took 0.24 seconds to deallocate network for instance. 
[ 1446.713582] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d682e3-f158-4f57-8143-2d2bf12cda3d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.721657] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc95f4fd-7873-4a78-afda-a343fb514e2b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.753560] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ae43e1-dbf3-4af4-addb-4bd42000008c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.756249] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a65d0288-f65a-416c-9b9c-42018bb19f4a tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "1e43f6be-f6a3-4569-adea-c82a5d709247" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.411s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1446.757313] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "1e43f6be-f6a3-4569-adea-c82a5d709247" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 235.568s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1446.757502] env[68040]: INFO nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 1e43f6be-f6a3-4569-adea-c82a5d709247] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1446.757672] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "1e43f6be-f6a3-4569-adea-c82a5d709247" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1446.762834] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c494dfc-9994-476a-9c0a-c0c56e85cd28 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.776684] env[68040]: DEBUG nova.compute.provider_tree [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1446.784713] env[68040]: DEBUG nova.scheduler.client.report [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1446.802621] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.393s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1446.803121] env[68040]: DEBUG nova.compute.manager [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1446.835025] env[68040]: DEBUG nova.compute.utils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1446.835878] env[68040]: DEBUG nova.compute.manager [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Allocating IP information in the background. 
{{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1446.836471] env[68040]: DEBUG nova.network.neutron [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1446.846340] env[68040]: DEBUG nova.compute.manager [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1446.910854] env[68040]: DEBUG nova.compute.manager [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Start spawning the instance on the hypervisor. {{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1446.925906] env[68040]: DEBUG nova.policy [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd80e102965474f1eaeb720c9ccd32240', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '56cc75f0a13948a983b1eabe7887b5f8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 1446.938175] env[68040]: DEBUG nova.virt.hardware [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=<?>,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-27T05:59:34Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1446.938424] env[68040]: DEBUG nova.virt.hardware [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1446.938578] env[68040]: DEBUG nova.virt.hardware [None req-ca06cd18-78e1-41af-8b57-293cadeff52d 
tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1446.938759] env[68040]: DEBUG nova.virt.hardware [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1446.938907] env[68040]: DEBUG nova.virt.hardware [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1446.939065] env[68040]: DEBUG nova.virt.hardware [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1446.939277] env[68040]: DEBUG nova.virt.hardware [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1446.939435] env[68040]: DEBUG nova.virt.hardware [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1446.939602] env[68040]: DEBUG nova.virt.hardware [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1446.939764] env[68040]: DEBUG nova.virt.hardware [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1446.940011] env[68040]: DEBUG nova.virt.hardware [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1446.940917] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9360a73-39a9-4b90-889d-6655e28ae3c4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.949515] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a01150-beb6-4bcb-99b2-dd8c6d7052e5 {{(pid=68040) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.363971] env[68040]: DEBUG nova.network.neutron [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Successfully created port: e681bd92-f500-4f2a-9383-23e98a1d11ec {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1447.986290] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1448.229455] env[68040]: DEBUG nova.network.neutron [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Successfully updated port: e681bd92-f500-4f2a-9383-23e98a1d11ec {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1448.242615] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquiring lock "refresh_cache-4dfa01f8-53a0-4ee4-9b00-93017144ea0b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.242772] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquired lock "refresh_cache-4dfa01f8-53a0-4ee4-9b00-93017144ea0b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.242925] env[68040]: DEBUG nova.network.neutron [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1448.290168] env[68040]: DEBUG nova.network.neutron [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Instance cache missing network info. 
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1448.463673] env[68040]: DEBUG nova.compute.manager [req-5b1d05a1-f865-4f3d-aa61-2e9654ef4f67 req-8a8470f6-dc97-419a-8a5d-6c04564ca4ca service nova] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Received event network-vif-plugged-e681bd92-f500-4f2a-9383-23e98a1d11ec {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1448.463910] env[68040]: DEBUG oslo_concurrency.lockutils [req-5b1d05a1-f865-4f3d-aa61-2e9654ef4f67 req-8a8470f6-dc97-419a-8a5d-6c04564ca4ca service nova] Acquiring lock "4dfa01f8-53a0-4ee4-9b00-93017144ea0b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.464119] env[68040]: DEBUG oslo_concurrency.lockutils [req-5b1d05a1-f865-4f3d-aa61-2e9654ef4f67 req-8a8470f6-dc97-419a-8a5d-6c04564ca4ca service nova] Lock "4dfa01f8-53a0-4ee4-9b00-93017144ea0b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.464300] env[68040]: DEBUG oslo_concurrency.lockutils [req-5b1d05a1-f865-4f3d-aa61-2e9654ef4f67 req-8a8470f6-dc97-419a-8a5d-6c04564ca4ca service nova] Lock "4dfa01f8-53a0-4ee4-9b00-93017144ea0b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.464467] env[68040]: DEBUG nova.compute.manager [req-5b1d05a1-f865-4f3d-aa61-2e9654ef4f67 req-8a8470f6-dc97-419a-8a5d-6c04564ca4ca service nova] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] No waiting events found dispatching network-vif-plugged-e681bd92-f500-4f2a-9383-23e98a1d11ec {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1448.464632] env[68040]: WARNING nova.compute.manager [req-5b1d05a1-f865-4f3d-aa61-2e9654ef4f67 req-8a8470f6-dc97-419a-8a5d-6c04564ca4ca service nova] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Received unexpected event network-vif-plugged-e681bd92-f500-4f2a-9383-23e98a1d11ec for instance with vm_state building and task_state spawning. [ 1448.464793] env[68040]: DEBUG nova.compute.manager [req-5b1d05a1-f865-4f3d-aa61-2e9654ef4f67 req-8a8470f6-dc97-419a-8a5d-6c04564ca4ca service nova] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Received event network-changed-e681bd92-f500-4f2a-9383-23e98a1d11ec {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1448.464946] env[68040]: DEBUG nova.compute.manager [req-5b1d05a1-f865-4f3d-aa61-2e9654ef4f67 req-8a8470f6-dc97-419a-8a5d-6c04564ca4ca service nova] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Refreshing instance network info cache due to event network-changed-e681bd92-f500-4f2a-9383-23e98a1d11ec. 
{{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1448.465302] env[68040]: DEBUG oslo_concurrency.lockutils [req-5b1d05a1-f865-4f3d-aa61-2e9654ef4f67 req-8a8470f6-dc97-419a-8a5d-6c04564ca4ca service nova] Acquiring lock "refresh_cache-4dfa01f8-53a0-4ee4-9b00-93017144ea0b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.618149] env[68040]: DEBUG nova.network.neutron [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Updating instance_info_cache with network_info: [{"id": "e681bd92-f500-4f2a-9383-23e98a1d11ec", "address": "fa:16:3e:5c:c3:66", "network": {"id": "8810e692-44f2-40eb-9fbd-0a209065c053", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1843594740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56cc75f0a13948a983b1eabe7887b5f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape681bd92-f5", "ovs_interfaceid": "e681bd92-f500-4f2a-9383-23e98a1d11ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.635251] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Releasing lock "refresh_cache-4dfa01f8-53a0-4ee4-9b00-93017144ea0b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1448.635566] env[68040]: DEBUG nova.compute.manager [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Instance network_info: |[{"id": "e681bd92-f500-4f2a-9383-23e98a1d11ec", "address": "fa:16:3e:5c:c3:66", "network": {"id": "8810e692-44f2-40eb-9fbd-0a209065c053", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1843594740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56cc75f0a13948a983b1eabe7887b5f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": 
"nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape681bd92-f5", "ovs_interfaceid": "e681bd92-f500-4f2a-9383-23e98a1d11ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1448.635860] env[68040]: DEBUG oslo_concurrency.lockutils [req-5b1d05a1-f865-4f3d-aa61-2e9654ef4f67 req-8a8470f6-dc97-419a-8a5d-6c04564ca4ca service nova] Acquired lock "refresh_cache-4dfa01f8-53a0-4ee4-9b00-93017144ea0b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.636053] env[68040]: DEBUG nova.network.neutron [req-5b1d05a1-f865-4f3d-aa61-2e9654ef4f67 req-8a8470f6-dc97-419a-8a5d-6c04564ca4ca service nova] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Refreshing network info cache for port e681bd92-f500-4f2a-9383-23e98a1d11ec {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1448.637105] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:c3:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e681bd92-f500-4f2a-9383-23e98a1d11ec', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1448.645626] env[68040]: DEBUG oslo.service.loopingcall [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1448.646103] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1448.648423] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2cddda92-5a80-4c2b-8f02-fdb0cb9ea37f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.669173] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1448.669173] env[68040]: value = "task-3200286" [ 1448.669173] env[68040]: _type = "Task" [ 1448.669173] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.676877] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200286, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.013583] env[68040]: DEBUG nova.network.neutron [req-5b1d05a1-f865-4f3d-aa61-2e9654ef4f67 req-8a8470f6-dc97-419a-8a5d-6c04564ca4ca service nova] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Updated VIF entry in instance network info cache for port e681bd92-f500-4f2a-9383-23e98a1d11ec. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1449.014069] env[68040]: DEBUG nova.network.neutron [req-5b1d05a1-f865-4f3d-aa61-2e9654ef4f67 req-8a8470f6-dc97-419a-8a5d-6c04564ca4ca service nova] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Updating instance_info_cache with network_info: [{"id": "e681bd92-f500-4f2a-9383-23e98a1d11ec", "address": "fa:16:3e:5c:c3:66", "network": {"id": "8810e692-44f2-40eb-9fbd-0a209065c053", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1843594740-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56cc75f0a13948a983b1eabe7887b5f8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8bc3fa06-9d5b-4ab1-8113-6ed8942d23b6", "external-id": "nsx-vlan-transportzone-72", "segmentation_id": 72, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape681bd92-f5", "ovs_interfaceid": "e681bd92-f500-4f2a-9383-23e98a1d11ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1449.026478] env[68040]: DEBUG oslo_concurrency.lockutils [req-5b1d05a1-f865-4f3d-aa61-2e9654ef4f67 req-8a8470f6-dc97-419a-8a5d-6c04564ca4ca service nova] Releasing lock "refresh_cache-4dfa01f8-53a0-4ee4-9b00-93017144ea0b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1449.180645] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200286, 'name': CreateVM_Task, 'duration_secs': 0.302492} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.181112] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1449.181942] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1449.182134] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1449.182633] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1449.182752] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6233d2af-e717-4681-915b-31ba4e5c359b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.188060] env[68040]: DEBUG oslo_vmware.api [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Waiting for the task: (returnval){ [ 1449.188060] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52e048b9-7278-3627-913e-eb600a1948b8" [ 1449.188060] env[68040]: _type = "Task" [ 1449.188060] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.194776] env[68040]: DEBUG oslo_vmware.api [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52e048b9-7278-3627-913e-eb600a1948b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.699735] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1449.700486] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1449.700813] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1450.984143] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1450.984412] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1450.996652] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.996868] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.997051] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.997216] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1450.998362] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3bb3dc-bcd0-45cf-a21d-3c9e66ac1ff5 {{(pid=68040) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.007274] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7870d9ae-0b12-4f03-bb4b-354948234a79 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.021989] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2163343f-ecab-42f0-ae32-b663553c7afe {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.028523] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9be4104-9743-4181-a4f6-d5520e9600f1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.057237] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180999MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1451.057403] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.057606] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1451.140667] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 3738de32-79cd-4b04-8081-cc1146730c75 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1451.140828] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1451.141319] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1451.141319] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e92b662c-b458-49d8-ac2a-00ae6046a11b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1451.141319] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 57cd94c2-aec3-427e-9b9f-a444fe291974 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1451.141445] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 87a7851e-d6fe-481a-8abb-5732e281cb64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1451.141445] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c581d685-7ea0-41f8-b911-ff1dce1b46c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1451.141561] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4a08d3e3-5e84-4f34-b418-2c18eadbef25 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1451.141713] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 268b5613-b132-49ed-a45b-bc88132177cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1451.141787] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4dfa01f8-53a0-4ee4-9b00-93017144ea0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1451.160755] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4ce0934f-8277-4029-8a0c-77468ee9b6dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.171529] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 09489d57-c6c1-4ac2-9c14-1a190172970c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.182335] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b5def543-2cbf-4ecc-b492-3607e5e74e38 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.193200] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.204892] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f9d3a788-4f1b-46f7-83ab-dd6884f68d2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.216730] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 83475c46-38de-4918-91b4-b53dcf3ead77 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.227947] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 6541b54b-214d-432c-8ae6-5de4ed99390f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.239293] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d8f18a76-588b-4329-a167-2a571f82455f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.252874] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 285249df-f5b4-4a68-89fe-9281fe1573e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.263816] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d17db434-040f-4859-913e-bfd658be14b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.264233] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1451.264422] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1451.580755] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa02891-174f-44cc-89d3-0df8808b831f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.588902] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ba7b01-60fc-4a5c-a9f0-415737053658 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.618144] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a5f40ec-07c6-42a3-9dbf-faca6ab09ea3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.628093] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f9a412-a614-493b-8280-8663b2f46a7d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.638375] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1451.649915] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1451.665978] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1451.666196] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.609s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.665514] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1452.987612] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1452.987796] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1452.987917] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1453.017581] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1453.017940] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1453.018144] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1453.018339] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1453.018734] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1453.018944] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Skipping network cache update for instance because it is Building. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1453.019206] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1453.019350] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1453.019671] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1453.019874] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1453.020112] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1453.020703] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1453.021153] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1453.921830] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Acquiring lock "c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1453.921830] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Lock "c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1455.983765] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1455.984079] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] 
Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1455.984158] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1458.980495] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1467.192725] env[68040]: DEBUG oslo_concurrency.lockutils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Acquiring lock "221a5bbe-7168-4f5c-ab49-8a149545655f" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.193538] env[68040]: DEBUG oslo_concurrency.lockutils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Lock "221a5bbe-7168-4f5c-ab49-8a149545655f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.377819] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c34bbd03-eddd-4ae7-b6cc-387e2829408c tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquiring lock "4dfa01f8-53a0-4ee4-9b00-93017144ea0b" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1493.135296] env[68040]: WARNING oslo_vmware.rw_handles [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1493.135296] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1493.135296] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1493.135296] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1493.135296] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1493.135296] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 1493.135296] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1493.135296] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1493.135296] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1493.135296] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1493.135296] env[68040]: ERROR oslo_vmware.rw_handles 
http.client.RemoteDisconnected: Remote end closed connection without response [ 1493.135296] env[68040]: ERROR oslo_vmware.rw_handles [ 1493.135895] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/864af2cd-a70d-4d6d-924a-afc28973bb58/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1493.137670] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1493.137903] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Copying Virtual Disk [datastore2] vmware_temp/864af2cd-a70d-4d6d-924a-afc28973bb58/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/864af2cd-a70d-4d6d-924a-afc28973bb58/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1493.138224] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac98ba7c-14dd-464a-ae09-62d594d85401 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.145949] env[68040]: DEBUG oslo_vmware.api [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Waiting for the task: (returnval){ [ 1493.145949] env[68040]: value = "task-3200287" [ 1493.145949] env[68040]: _type = "Task" [ 1493.145949] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.153728] env[68040]: DEBUG oslo_vmware.api [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Task: {'id': task-3200287, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.656717] env[68040]: DEBUG oslo_vmware.exceptions [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Fault InvalidArgument not matched. 
{{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1493.657016] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.657662] env[68040]: ERROR nova.compute.manager [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1493.657662] env[68040]: Faults: ['InvalidArgument'] [ 1493.657662] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Traceback (most recent call last): [ 1493.657662] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1493.657662] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] yield resources [ 1493.657662] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1493.657662] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] self.driver.spawn(context, instance, image_meta, [ 1493.657662] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1493.657662] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1493.657662] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1493.657662] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] self._fetch_image_if_missing(context, vi) [ 1493.657662] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1493.658082] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] image_cache(vi, tmp_image_ds_loc) [ 1493.658082] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1493.658082] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] vm_util.copy_virtual_disk( [ 1493.658082] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1493.658082] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] session._wait_for_task(vmdk_copy_task) [ 1493.658082] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, 
in _wait_for_task [ 1493.658082] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] return self.wait_for_task(task_ref) [ 1493.658082] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1493.658082] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] return evt.wait() [ 1493.658082] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1493.658082] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] result = hub.switch() [ 1493.658082] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1493.658082] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] return self.greenlet.switch() [ 1493.658427] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1493.658427] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] self.f(*self.args, **self.kw) [ 1493.658427] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1493.658427] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] raise exceptions.translate_fault(task_info.error) [ 1493.658427] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1493.658427] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Faults: ['InvalidArgument'] [ 1493.658427] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] [ 1493.658427] env[68040]: INFO nova.compute.manager [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Terminating instance [ 1493.662138] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.662138] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1493.662138] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Acquiring lock 
"refresh_cache-3738de32-79cd-4b04-8081-cc1146730c75" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.662138] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Acquired lock "refresh_cache-3738de32-79cd-4b04-8081-cc1146730c75" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.662325] env[68040]: DEBUG nova.network.neutron [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1493.662435] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c4360f35-4bd1-4a81-87c3-547a632b1415 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.670152] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1493.671151] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1493.671625] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32dea852-7f37-4ba1-a983-af13fc27df30 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.679143] env[68040]: DEBUG oslo_vmware.api [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for the task: (returnval){ [ 1493.679143] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52b35983-5a2b-5aeb-070e-ba6c8bacb593" [ 1493.679143] env[68040]: _type = "Task" [ 1493.679143] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.687666] env[68040]: DEBUG oslo_vmware.api [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52b35983-5a2b-5aeb-070e-ba6c8bacb593, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.720500] env[68040]: DEBUG nova.network.neutron [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Instance cache missing network info. 
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1493.827368] env[68040]: DEBUG nova.network.neutron [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.836866] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Releasing lock "refresh_cache-3738de32-79cd-4b04-8081-cc1146730c75" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.837311] env[68040]: DEBUG nova.compute.manager [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1493.837506] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1493.838597] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27fb5f48-4981-42e3-85ca-fdc64f671233 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.846390] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1493.846629] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2814cad8-f6ec-4abf-9c93-e80230c42680 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.876944] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1493.877244] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1493.877457] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Deleting the datastore file [datastore2] 3738de32-79cd-4b04-8081-cc1146730c75 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1493.877774] env[68040]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3e38c4cb-211b-4733-b7a4-2f571e251fc9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.883675] env[68040]: DEBUG oslo_vmware.api [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Waiting for the task: (returnval){ [ 1493.883675] env[68040]: value = "task-3200289" [ 1493.883675] env[68040]: _type = "Task" [ 1493.883675] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.891465] env[68040]: DEBUG oslo_vmware.api [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Task: {'id': task-3200289, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.189315] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1494.189584] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Creating directory with path [datastore2] vmware_temp/9067f534-7f58-4b55-be81-23d77e3fbc83/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1494.189820] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-905aca03-2af8-4a9e-8cfe-645797dae8d9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.201478] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Created directory with path [datastore2] vmware_temp/9067f534-7f58-4b55-be81-23d77e3fbc83/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1494.201682] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Fetch image to [datastore2] vmware_temp/9067f534-7f58-4b55-be81-23d77e3fbc83/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1494.201857] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/9067f534-7f58-4b55-be81-23d77e3fbc83/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1494.202626] env[68040]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a8d979-6192-4d45-8ccc-e14fb45557f6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.209625] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8df56fc-a358-4b64-9eae-f4940654d903 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.218872] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06118a30-0f7e-4ae0-aa3f-a9d33cfba477 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.250227] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbbb2731-c564-4cce-a7d5-ac008c8c6023 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.257933] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3bc685ae-e2f1-4b5f-9319-cf0f09c2c781 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.278082] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1494.328928] env[68040]: DEBUG oslo_vmware.rw_handles [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9067f534-7f58-4b55-be81-23d77e3fbc83/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1494.390329] env[68040]: DEBUG oslo_vmware.rw_handles [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1494.390536] env[68040]: DEBUG oslo_vmware.rw_handles [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9067f534-7f58-4b55-be81-23d77e3fbc83/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1494.394756] env[68040]: DEBUG oslo_vmware.api [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Task: {'id': task-3200289, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.044079} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.395013] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1494.395218] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1494.395425] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1494.395599] env[68040]: INFO nova.compute.manager [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Took 0.56 seconds to destroy the instance on the hypervisor. [ 1494.395828] env[68040]: DEBUG oslo.service.loopingcall [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1494.396060] env[68040]: DEBUG nova.compute.manager [-] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1494.398313] env[68040]: DEBUG nova.compute.claims [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1494.398491] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.398723] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.651580] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61fe1f43-11c3-4643-a69f-2288b3ae9f18 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.659452] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c19e0b0-e4d7-44b8-9314-76033a6d6e42 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.690041] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9cd8353-ad85-41f5-8b56-2abf8864ba5b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.696133] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-936eb977-260a-4700-9dfc-081cb1287be1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.709345] env[68040]: DEBUG nova.compute.provider_tree [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1494.718440] env[68040]: DEBUG nova.scheduler.client.report [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1494.732399] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 
tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.334s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.732928] env[68040]: ERROR nova.compute.manager [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1494.732928] env[68040]: Faults: ['InvalidArgument'] [ 1494.732928] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Traceback (most recent call last): [ 1494.732928] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1494.732928] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] self.driver.spawn(context, instance, image_meta, [ 1494.732928] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1494.732928] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1494.732928] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1494.732928] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] self._fetch_image_if_missing(context, vi) [ 1494.732928] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1494.732928] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] image_cache(vi, tmp_image_ds_loc) [ 1494.732928] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1494.733387] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] vm_util.copy_virtual_disk( [ 1494.733387] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1494.733387] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] session._wait_for_task(vmdk_copy_task) [ 1494.733387] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1494.733387] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] return self.wait_for_task(task_ref) [ 1494.733387] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1494.733387] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] return evt.wait() [ 1494.733387] env[68040]: ERROR 
nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1494.733387] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] result = hub.switch() [ 1494.733387] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1494.733387] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] return self.greenlet.switch() [ 1494.733387] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1494.733387] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] self.f(*self.args, **self.kw) [ 1494.733687] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1494.733687] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] raise exceptions.translate_fault(task_info.error) [ 1494.733687] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1494.733687] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Faults: ['InvalidArgument'] [ 1494.733687] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] [ 1494.733687] env[68040]: DEBUG nova.compute.utils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1494.734978] env[68040]: DEBUG nova.compute.manager [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Build of instance 3738de32-79cd-4b04-8081-cc1146730c75 was re-scheduled: A specified parameter was not correct: fileType [ 1494.734978] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1494.735358] env[68040]: DEBUG nova.compute.manager [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1494.735582] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Acquiring lock "refresh_cache-3738de32-79cd-4b04-8081-cc1146730c75" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.735729] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Acquired lock 
"refresh_cache-3738de32-79cd-4b04-8081-cc1146730c75" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.735889] env[68040]: DEBUG nova.network.neutron [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1494.760731] env[68040]: DEBUG nova.network.neutron [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1494.843509] env[68040]: DEBUG nova.network.neutron [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1494.856790] env[68040]: DEBUG oslo_concurrency.lockutils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "031481de-d52f-4f3f-80e5-0d0d6803d624" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.857042] env[68040]: DEBUG oslo_concurrency.lockutils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "031481de-d52f-4f3f-80e5-0d0d6803d624" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.858685] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Releasing lock "refresh_cache-3738de32-79cd-4b04-8081-cc1146730c75" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1494.858996] env[68040]: DEBUG nova.compute.manager [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1494.859098] env[68040]: DEBUG nova.compute.manager [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1494.946895] env[68040]: INFO nova.scheduler.client.report [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Deleted allocations for instance 3738de32-79cd-4b04-8081-cc1146730c75 [ 1494.971812] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a2fc6f41-b845-465c-91ce-1ebc5c57e125 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Lock "3738de32-79cd-4b04-8081-cc1146730c75" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 642.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.973011] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Lock "3738de32-79cd-4b04-8081-cc1146730c75" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 446.131s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.973252] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Acquiring lock "3738de32-79cd-4b04-8081-cc1146730c75-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.973461] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Lock "3738de32-79cd-4b04-8081-cc1146730c75-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.973627] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Lock "3738de32-79cd-4b04-8081-cc1146730c75-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.975637] env[68040]: INFO nova.compute.manager [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Terminating instance [ 1494.977225] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Acquiring lock "refresh_cache-3738de32-79cd-4b04-8081-cc1146730c75" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.977478] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Acquired lock "refresh_cache-3738de32-79cd-4b04-8081-cc1146730c75" 
{{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.977553] env[68040]: DEBUG nova.network.neutron [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1494.986776] env[68040]: DEBUG nova.compute.manager [None req-78f35d23-63af-45ac-8ec0-4daa73a597f6 tempest-AttachInterfacesTestJSON-1449330779 tempest-AttachInterfacesTestJSON-1449330779-project-member] [instance: 4ce0934f-8277-4029-8a0c-77468ee9b6dc] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1495.017951] env[68040]: DEBUG nova.network.neutron [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1495.020808] env[68040]: DEBUG nova.compute.manager [None req-78f35d23-63af-45ac-8ec0-4daa73a597f6 tempest-AttachInterfacesTestJSON-1449330779 tempest-AttachInterfacesTestJSON-1449330779-project-member] [instance: 4ce0934f-8277-4029-8a0c-77468ee9b6dc] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1495.067270] env[68040]: DEBUG oslo_concurrency.lockutils [None req-78f35d23-63af-45ac-8ec0-4daa73a597f6 tempest-AttachInterfacesTestJSON-1449330779 tempest-AttachInterfacesTestJSON-1449330779-project-member] Lock "4ce0934f-8277-4029-8a0c-77468ee9b6dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.772s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.078811] env[68040]: DEBUG nova.compute.manager [None req-9aca850b-bef5-436d-97e8-51de58b83b70 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 09489d57-c6c1-4ac2-9c14-1a190172970c] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1495.103837] env[68040]: DEBUG nova.compute.manager [None req-9aca850b-bef5-436d-97e8-51de58b83b70 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 09489d57-c6c1-4ac2-9c14-1a190172970c] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1495.124801] env[68040]: DEBUG oslo_concurrency.lockutils [None req-9aca850b-bef5-436d-97e8-51de58b83b70 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "09489d57-c6c1-4ac2-9c14-1a190172970c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.264s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.134977] env[68040]: DEBUG nova.compute.manager [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Starting instance... 
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1495.187033] env[68040]: DEBUG oslo_concurrency.lockutils [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1495.187305] env[68040]: DEBUG oslo_concurrency.lockutils [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.189011] env[68040]: INFO nova.compute.claims [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1495.351062] env[68040]: DEBUG nova.network.neutron [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1495.361723] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Releasing lock "refresh_cache-3738de32-79cd-4b04-8081-cc1146730c75" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.362146] env[68040]: DEBUG nova.compute.manager [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Start destroying the instance on the hypervisor. 
{{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1495.362341] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1495.362909] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1c512895-03f4-49da-a6c1-05dca3ae26e7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.374759] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03534822-9275-4cff-97ab-43550b019fc7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.408539] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3738de32-79cd-4b04-8081-cc1146730c75 could not be found. [ 1495.408726] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1495.408892] env[68040]: INFO nova.compute.manager [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1495.409168] env[68040]: DEBUG oslo.service.loopingcall [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1495.409394] env[68040]: DEBUG nova.compute.manager [-] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1495.409494] env[68040]: DEBUG nova.network.neutron [-] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1495.470869] env[68040]: DEBUG oslo_concurrency.lockutils [None req-7a535bce-67e5-4580-8a5d-7e0e4585e7b1 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Acquiring lock "b5def543-2cbf-4ecc-b492-3607e5e74e38" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1495.527722] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f71f18d-c5b3-4f56-8820-7775fc3e0f8b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.538139] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-902ca75c-52ae-467e-956b-c928f6f8f496 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.583877] env[68040]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68040) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1495.584202] env[68040]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1495.584930] env[68040]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
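
The "Dynamic interval looping call ... RetryDecorator.__call__.<locals>._func failed" record above is oslo.service's retry machinery giving up: Nova wraps _deallocate_network in loopingcall.RetryDecorator, which re-invokes the function whenever it raises one of the exception types it was configured to retry, and lets any other exception escape, at which point the looping call logs this ERROR and re-raises to the caller via evt.wait(). A minimal runnable sketch of that behavior, assuming only that oslo.service (and its eventlet dependency) is installed; the function body and the TransientError type are invented for illustration, only RetryDecorator and its arguments are the real oslo.service API:

from oslo_service import loopingcall

class TransientError(Exception):
    """Hypothetical retriable failure, standing in for a transient network error."""

attempts = {"n": 0}

# Retry at most 3 times, but only on TransientError; any other exception
# escapes the looping call, which logs "Dynamic interval looping call
# ... failed" and re-raises it to the caller.
@loopingcall.RetryDecorator(max_retry_count=3, exceptions=(TransientError,))
def deallocate():
    attempts["n"] += 1
    if attempts["n"] == 1:
        raise TransientError()              # swallowed, retried after a short sleep
    raise RuntimeError("401 Unauthorized")  # not retriable: propagates

try:
    deallocate()
except RuntimeError as exc:
    print("looping call failed after %d calls: %s" % (attempts["n"], exc))

In the log, NeutronAdminCredentialConfigurationInvalid is not in the retryable set, so the very first deallocation attempt aborts the loop, as the traceback that follows shows.
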
[ 1495.584930] env[68040]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1495.584930] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1495.584930] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1495.584930] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1495.584930] env[68040]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1495.584930] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1495.584930] env[68040]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1495.584930] env[68040]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1495.584930] env[68040]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-6dc22c9a-63f9-43e0-8667-fcccb82d94c2'] [ 1495.584930] env[68040]: ERROR oslo.service.loopingcall [ 1495.584930] env[68040]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1495.584930] env[68040]: ERROR oslo.service.loopingcall [ 1495.584930] env[68040]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1495.584930] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1495.584930] env[68040]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1495.585396] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1495.585396] env[68040]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1495.585396] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1495.585396] env[68040]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1495.585396] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1495.585396] env[68040]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1495.585396] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1495.585396] env[68040]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1495.585396] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1495.585396] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1495.585396] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1495.585396] env[68040]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1495.585396] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1495.585396] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1495.585396] env[68040]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1495.585396] env[68040]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1495.585396] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1495.585396] env[68040]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1495.585889] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1495.585889] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1495.585889] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1495.585889] env[68040]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1495.585889] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1495.585889] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1495.585889] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1495.585889] env[68040]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1495.585889] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1495.585889] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1495.585889] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1495.585889] env[68040]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1495.585889] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1495.585889] env[68040]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1495.585889] env[68040]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1495.585889] env[68040]: ERROR oslo.service.loopingcall [ 1495.586333] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d99a92-480f-41cf-ba1d-0017722076f0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.589086] env[68040]: ERROR nova.compute.manager [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
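
Both tracebacks above bottom out in the same place: every Neutron request is answered with a Keystone 401, i.e. the service credentials in the [neutron] section of nova.conf no longer yield a valid token. Those credentials can be sanity-checked outside of Nova by feeding them to keystoneauth1 directly; in this sketch the auth_url and account values are placeholders, not values taken from this log:

from keystoneauth1 import exceptions as ks_exc
from keystoneauth1 import session as ks_session
from keystoneauth1.identity import v3

# All values below are placeholders -- substitute the auth_url, username,
# password, project and domains configured under [neutron] in nova.conf.
auth = v3.Password(
    auth_url="http://controller/identity/v3",
    username="neutron",
    password="secret",
    project_name="service",
    user_domain_name="Default",
    project_domain_name="Default",
)
sess = ks_session.Session(auth=auth)

try:
    # Succeeds only if Keystone accepts the credentials and scoping.
    print("token issued:", sess.get_token()[:16], "...")
except ks_exc.Unauthorized:
    # The same 401 that made deallocate_for_instance() fail in the log.
    print("401 Unauthorized: fix the [neutron] credentials in nova.conf")

If this standalone check succeeds while Nova still logs 401s, the usual mismatch is between what was tested and what the service actually loads (wrong config file, or wrong domain/project scoping), so compare the sketch's inputs against the live configuration.
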
[ 1495.595975] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c997445e-4bf6-4947-bee6-7ccaaa389697 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.609515] env[68040]: DEBUG nova.compute.provider_tree [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1495.620017] env[68040]: DEBUG nova.scheduler.client.report [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1495.623908] env[68040]: ERROR nova.compute.manager [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1495.623908] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Traceback (most recent call last): [ 1495.623908] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1495.623908] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] ret = obj(*args, **kwargs) [ 1495.623908] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1495.623908] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] exception_handler_v20(status_code, error_body) [ 1495.623908] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1495.623908] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] raise client_exc(message=error_message, [ 1495.623908] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1495.623908] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Neutron server returns request_ids: ['req-6dc22c9a-63f9-43e0-8667-fcccb82d94c2'] [ 1495.623908] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] [ 1495.624338] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] During handling of the above exception, another exception occurred: [ 1495.624338] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] [ 1495.624338] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Traceback (most recent call last): [ 1495.624338] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1495.624338] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] self._delete_instance(context, instance, bdms) [ 1495.624338] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1495.624338] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] self._shutdown_instance(context, instance, bdms) [ 1495.624338] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1495.624338] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] self._try_deallocate_network(context, instance, requested_networks) [ 1495.624338] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1495.624338] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] with excutils.save_and_reraise_exception(): [ 1495.624338] env[68040]: ERROR 
nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1495.624338] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] self.force_reraise() [ 1495.624720] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1495.624720] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] raise self.value [ 1495.624720] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1495.624720] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] _deallocate_network_with_retries() [ 1495.624720] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1495.624720] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] return evt.wait() [ 1495.624720] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1495.624720] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] result = hub.switch() [ 1495.624720] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1495.624720] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] return self.greenlet.switch() [ 1495.624720] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1495.624720] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] result = func(*self.args, **self.kw) [ 1495.625059] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1495.625059] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] result = f(*args, **kwargs) [ 1495.625059] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1495.625059] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] self._deallocate_network( [ 1495.625059] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1495.625059] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] self.network_api.deallocate_for_instance( [ 1495.625059] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1495.625059] env[68040]: ERROR nova.compute.manager [instance: 
3738de32-79cd-4b04-8081-cc1146730c75] data = neutron.list_ports(**search_opts) [ 1495.625059] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1495.625059] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] ret = obj(*args, **kwargs) [ 1495.625059] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1495.625059] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] return self.list('ports', self.ports_path, retrieve_all, [ 1495.625059] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1495.625434] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] ret = obj(*args, **kwargs) [ 1495.625434] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1495.625434] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] for r in self._pagination(collection, path, **params): [ 1495.625434] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1495.625434] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] res = self.get(path, params=params) [ 1495.625434] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1495.625434] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] ret = obj(*args, **kwargs) [ 1495.625434] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1495.625434] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] return self.retry_request("GET", action, body=body, [ 1495.625434] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1495.625434] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] ret = obj(*args, **kwargs) [ 1495.625434] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1495.625434] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] return self.do_request(method, action, body=body, [ 1495.625803] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1495.625803] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] ret = obj(*args, **kwargs) [ 1495.625803] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1495.625803] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] self._handle_fault_response(status_code, replybody, resp) [ 1495.625803] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1495.625803] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1495.625803] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1495.625803] env[68040]: ERROR nova.compute.manager [instance: 3738de32-79cd-4b04-8081-cc1146730c75] [ 1495.634543] env[68040]: DEBUG oslo_concurrency.lockutils [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.447s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.635019] env[68040]: DEBUG nova.compute.manager [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1495.664224] env[68040]: DEBUG nova.compute.claims [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1495.664440] env[68040]: DEBUG oslo_concurrency.lockutils [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1495.664672] env[68040]: DEBUG oslo_concurrency.lockutils [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.668167] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Lock "3738de32-79cd-4b04-8081-cc1146730c75" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.695s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.669398] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock 
"3738de32-79cd-4b04-8081-cc1146730c75" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 284.480s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.669595] env[68040]: INFO nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] During sync_power_state the instance has a pending task (deleting). Skip. [ 1495.669750] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "3738de32-79cd-4b04-8081-cc1146730c75" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.733762] env[68040]: INFO nova.compute.manager [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] [instance: 3738de32-79cd-4b04-8081-cc1146730c75] Successfully reverted task state from None on failure for instance. [ 1495.737840] env[68040]: ERROR oslo_messaging.rpc.server [None req-3f01ee9e-35c0-427d-9702-881f69100960 tempest-ServerShowV254Test-185786981 tempest-ServerShowV254Test-185786981-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1495.737840] env[68040]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1495.737840] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1495.737840] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1495.737840] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1495.737840] env[68040]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1495.737840] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1495.737840] env[68040]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1495.737840] env[68040]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1495.737840] env[68040]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-6dc22c9a-63f9-43e0-8667-fcccb82d94c2'] [ 1495.737840] env[68040]: ERROR oslo_messaging.rpc.server [ 1495.737840] env[68040]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1495.737840] env[68040]: ERROR oslo_messaging.rpc.server [ 1495.737840] env[68040]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1495.737840] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1495.737840] env[68040]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1495.738504] env[68040]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1495.738504] env[68040]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1495.738504] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1495.738504] env[68040]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1495.738504] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1495.738504] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1495.738504] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1495.738504] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1495.738504] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1495.738504] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1495.738504] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1495.738504] env[68040]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1495.738504] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1495.738504] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1495.738504] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1495.738504] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1495.738504] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1495.738504] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1495.739009] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1495.739009] env[68040]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1495.739009] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1495.739009] env[68040]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1495.739009] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1495.739009] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1495.739009] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1495.739009] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1495.739009] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1495.739009] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1495.739009] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1495.739009] env[68040]: ERROR 
oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1495.739009] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1495.739009] env[68040]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1495.739009] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1495.739009] env[68040]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1495.739009] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1495.739009] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1495.739474] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1495.739474] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1495.739474] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1495.739474] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1495.739474] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1495.739474] env[68040]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1495.739474] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1495.739474] env[68040]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1495.739474] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1495.739474] env[68040]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1495.739474] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1495.739474] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1495.739474] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1495.739474] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1495.739474] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1495.739474] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1495.739474] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1495.739474] env[68040]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1495.739922] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1495.739922] env[68040]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1495.739922] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1495.739922] env[68040]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1495.739922] env[68040]: ERROR 
oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1495.739922] env[68040]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1495.739922] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1495.739922] env[68040]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1495.739922] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1495.739922] env[68040]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1495.739922] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1495.739922] env[68040]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1495.739922] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1495.739922] env[68040]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1495.739922] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1495.739922] env[68040]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1495.739922] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1495.739922] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1495.740448] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1495.740448] env[68040]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1495.740448] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1495.740448] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1495.740448] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1495.740448] env[68040]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1495.740448] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1495.740448] env[68040]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1495.740448] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1495.740448] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1495.740448] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1495.740448] env[68040]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1495.740448] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1495.740448] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1495.740448] env[68040]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1495.740448] env[68040]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1495.740448] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1495.740448] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1495.740919] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1495.740919] env[68040]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1495.740919] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1495.740919] env[68040]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1495.740919] env[68040]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1495.740919] env[68040]: ERROR oslo_messaging.rpc.server [ 1495.911533] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448081d9-a236-4f29-b0c0-eff3504a0827 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.920650] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed33a647-bfc5-4183-9bd8-954d3569d6b1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.949918] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43b55b3-f279-4033-b8a3-f4c1b350f94e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.956972] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1d1261-1cb9-42e6-a144-900f2211ab65 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.969643] env[68040]: DEBUG nova.compute.provider_tree [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1495.978447] env[68040]: DEBUG nova.scheduler.client.report [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1495.991448] env[68040]: DEBUG oslo_concurrency.lockutils [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 
tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.327s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.992151] env[68040]: DEBUG nova.compute.utils [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Conflict updating instance b5def543-2cbf-4ecc-b492-3607e5e74e38. Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'} {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1495.993519] env[68040]: DEBUG nova.compute.manager [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Instance disappeared during build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2504}} [ 1495.993693] env[68040]: DEBUG nova.compute.manager [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1495.993910] env[68040]: DEBUG oslo_concurrency.lockutils [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Acquiring lock "refresh_cache-b5def543-2cbf-4ecc-b492-3607e5e74e38" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.994069] env[68040]: DEBUG oslo_concurrency.lockutils [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Acquired lock "refresh_cache-b5def543-2cbf-4ecc-b492-3607e5e74e38" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.994241] env[68040]: DEBUG nova.network.neutron [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1496.036704] env[68040]: DEBUG nova.network.neutron [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Instance cache missing network info. 
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1496.229594] env[68040]: DEBUG nova.network.neutron [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.239130] env[68040]: DEBUG oslo_concurrency.lockutils [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Releasing lock "refresh_cache-b5def543-2cbf-4ecc-b492-3607e5e74e38" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1496.239364] env[68040]: DEBUG nova.compute.manager [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1496.239551] env[68040]: DEBUG nova.compute.manager [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1496.239750] env[68040]: DEBUG nova.network.neutron [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1496.257167] env[68040]: DEBUG nova.network.neutron [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1496.265152] env[68040]: DEBUG nova.network.neutron [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.272731] env[68040]: INFO nova.compute.manager [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Took 0.03 seconds to deallocate network for instance.
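
The deallocate_for_instance() step logged just above is the same call that blew up with a 401 in the tracebacks earlier (data = neutron.list_ports(**search_opts) at nova/network/neutron.py:1806). As a rough illustration of what that step does, here is a minimal Python sketch, not Nova's actual implementation; the function name deallocate_ports and the bare neutronclient usage are assumptions for illustration only:

    from neutronclient.v2_0 import client as neutron_client

    def deallocate_ports(neutron: "neutron_client.Client", instance_uuid: str) -> int:
        # Nova looks up the ports bound to the instance by filtering on
        # device_id, i.e. the instance UUID (the list_ports call in the
        # tracebacks above); an expired or misconfigured service credential
        # makes this very call fail with 401 Unauthorized.
        data = neutron.list_ports(device_id=instance_uuid)
        ports = data.get('ports', [])
        for port in ports:
            # Each bound port is then deleted. With an empty network_info
            # cache, as logged here for b5def543-2cbf-4ecc-b492-3607e5e74e38,
            # the list is empty and deallocation finishes almost instantly
            # ("Took 0.03 seconds to deallocate network for instance.").
            neutron.delete_port(port['id'])
        return len(ports)
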
[ 1496.353863] env[68040]: INFO nova.scheduler.client.report [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Deleted allocations for instance b5def543-2cbf-4ecc-b492-3607e5e74e38 [ 1496.353863] env[68040]: DEBUG oslo_concurrency.lockutils [None req-25111dc7-4803-4770-ba21-7ff836ea8cc4 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Lock "b5def543-2cbf-4ecc-b492-3607e5e74e38" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 196.976s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1496.355088] env[68040]: DEBUG oslo_concurrency.lockutils [None req-7a535bce-67e5-4580-8a5d-7e0e4585e7b1 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Lock "b5def543-2cbf-4ecc-b492-3607e5e74e38" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.885s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.355346] env[68040]: DEBUG oslo_concurrency.lockutils [None req-7a535bce-67e5-4580-8a5d-7e0e4585e7b1 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Acquiring lock "b5def543-2cbf-4ecc-b492-3607e5e74e38-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.355905] env[68040]: DEBUG oslo_concurrency.lockutils [None req-7a535bce-67e5-4580-8a5d-7e0e4585e7b1 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Lock "b5def543-2cbf-4ecc-b492-3607e5e74e38-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.356096] env[68040]: DEBUG oslo_concurrency.lockutils [None req-7a535bce-67e5-4580-8a5d-7e0e4585e7b1 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Lock "b5def543-2cbf-4ecc-b492-3607e5e74e38-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1496.358145] env[68040]: INFO nova.compute.manager [None req-7a535bce-67e5-4580-8a5d-7e0e4585e7b1 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Terminating instance [ 1496.359727] env[68040]: DEBUG oslo_concurrency.lockutils [None req-7a535bce-67e5-4580-8a5d-7e0e4585e7b1 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Acquiring lock "refresh_cache-b5def543-2cbf-4ecc-b492-3607e5e74e38" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1496.359884] env[68040]: DEBUG oslo_concurrency.lockutils [None req-7a535bce-67e5-4580-8a5d-7e0e4585e7b1 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Acquired lock
"refresh_cache-b5def543-2cbf-4ecc-b492-3607e5e74e38" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1496.360066] env[68040]: DEBUG nova.network.neutron [None req-7a535bce-67e5-4580-8a5d-7e0e4585e7b1 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1496.364253] env[68040]: DEBUG nova.compute.manager [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1496.395024] env[68040]: DEBUG nova.network.neutron [None req-7a535bce-67e5-4580-8a5d-7e0e4585e7b1 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1496.410626] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.410865] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.412366] env[68040]: INFO nova.compute.claims [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1496.525630] env[68040]: DEBUG nova.network.neutron [None req-7a535bce-67e5-4580-8a5d-7e0e4585e7b1 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.534400] env[68040]: DEBUG oslo_concurrency.lockutils [None req-7a535bce-67e5-4580-8a5d-7e0e4585e7b1 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Releasing lock "refresh_cache-b5def543-2cbf-4ecc-b492-3607e5e74e38" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1496.534790] env[68040]: DEBUG nova.compute.manager [None req-7a535bce-67e5-4580-8a5d-7e0e4585e7b1 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Start destroying the instance on the hypervisor. 
{{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1496.534983] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-7a535bce-67e5-4580-8a5d-7e0e4585e7b1 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1496.535510] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ac214352-039f-473e-87c5-481ca5401d98 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.549625] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cec1aba-0189-4cd5-92c1-51c50f814ebd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.583082] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-7a535bce-67e5-4580-8a5d-7e0e4585e7b1 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b5def543-2cbf-4ecc-b492-3607e5e74e38 could not be found. [ 1496.583687] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-7a535bce-67e5-4580-8a5d-7e0e4585e7b1 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1496.583864] env[68040]: INFO nova.compute.manager [None req-7a535bce-67e5-4580-8a5d-7e0e4585e7b1 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1496.584278] env[68040]: DEBUG oslo.service.loopingcall [None req-7a535bce-67e5-4580-8a5d-7e0e4585e7b1 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1496.588051] env[68040]: DEBUG nova.compute.manager [-] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1496.588051] env[68040]: DEBUG nova.network.neutron [-] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1496.609933] env[68040]: DEBUG nova.network.neutron [-] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1496.618066] env[68040]: DEBUG nova.network.neutron [-] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.629364] env[68040]: INFO nova.compute.manager [-] [instance: b5def543-2cbf-4ecc-b492-3607e5e74e38] Took 0.04 seconds to deallocate network for instance.
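
The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return." record comes from the oslo.service loopingcall machinery that wraps the Neutron cleanup in a retry loop (the loopingcall.py frames in the tracebacks above). A sketch of that pattern, with retry parameters that are assumptions for illustration rather than Nova's configured values:

    from oslo_service import loopingcall

    @loopingcall.RetryDecorator(
        max_retry_count=3,        # assumed value; attempts before giving up
        inc_sleep_time=2,         # sleep grows by this much after each failure
        max_sleep_time=30,        # cap on the backoff sleep
        exceptions=(Exception,))  # the real code lists specific retryable types
    def _deallocate_network_with_retries() -> None:
        # Stand-in body: the real inner function calls
        # ComputeManager._deallocate_network(). A persistent failure, such as
        # the Neutron 401 earlier in this log, exhausts the retries and the
        # final exception propagates, which is how the terminate path above
        # ended in NeutronAdminCredentialConfigurationInvalid.
        pass

Calling the decorated function starts the retry loop on a greenthread and blocks on its event (the return evt.wait() frame in the tracebacks), which is exactly what the "Waiting for function ... to return." line records.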
[ 1496.712438] env[68040]: DEBUG oslo_concurrency.lockutils [None req-7a535bce-67e5-4580-8a5d-7e0e4585e7b1 tempest-ServersTestFqdnHostnames-1502255862 tempest-ServersTestFqdnHostnames-1502255862-project-member] Lock "b5def543-2cbf-4ecc-b492-3607e5e74e38" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.357s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1496.739883] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a58db6-d9ff-408a-b60b-f28bd840383c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.747909] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade0b966-5ed0-4b04-b34f-3c0f1c44b8c6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.779958] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac2f8ced-472f-4a42-b800-ba885f5b1ddf {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.788024] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3fe754e-4228-48d9-a3ff-15a9e6ec5c6c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.801663] env[68040]: DEBUG nova.compute.provider_tree [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1496.810054] env[68040]: DEBUG nova.scheduler.client.report [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1496.823398] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.412s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1496.823873] env[68040]: DEBUG nova.compute.manager [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Start building networks asynchronously for instance.
{{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1496.854552] env[68040]: DEBUG nova.compute.utils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1496.855919] env[68040]: DEBUG nova.compute.manager [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Not allocating networking since 'none' was specified. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 1496.863776] env[68040]: DEBUG nova.compute.manager [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1496.922575] env[68040]: DEBUG nova.compute.manager [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Start spawning the instance on the hypervisor. {{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1496.948194] env[68040]: DEBUG nova.virt.hardware [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=<?>,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-27T05:59:34Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1496.948453] env[68040]: DEBUG nova.virt.hardware [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1496.948620] env[68040]: DEBUG nova.virt.hardware [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1496.948804] env[68040]: DEBUG nova.virt.hardware [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1496.948950] env[68040]: DEBUG nova.virt.hardware [None
req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1496.949112] env[68040]: DEBUG nova.virt.hardware [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1496.949319] env[68040]: DEBUG nova.virt.hardware [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1496.949476] env[68040]: DEBUG nova.virt.hardware [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1496.949642] env[68040]: DEBUG nova.virt.hardware [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1496.949807] env[68040]: DEBUG nova.virt.hardware [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1496.949982] env[68040]: DEBUG nova.virt.hardware [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1496.950899] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-986a5777-b072-4a80-a188-f35e7dffde52 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.958752] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e473ddc7-dc38-438a-bc42-5facad5778d5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.972281] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Instance VIF info [] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1496.977814] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Creating folder: Project (45f024cfdbee4e75a7c7cc8568d69578). Parent ref: group-v639956. 
{{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1496.978072] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5711b35c-861a-4e9d-a2cd-3c95bf61e6ee {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.988188] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Created folder: Project (45f024cfdbee4e75a7c7cc8568d69578) in parent group-v639956. [ 1496.988364] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Creating folder: Instances. Parent ref: group-v640040. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1496.988568] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8903bcdd-178f-4160-a505-41505cb5d518 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.996707] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Created folder: Instances in parent group-v640040. [ 1496.996927] env[68040]: DEBUG oslo.service.loopingcall [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1496.997114] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1496.997300] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0594c53-d1d0-4599-bd2e-3c1cd9bab130 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.013058] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1497.013058] env[68040]: value = "task-3200292" [ 1497.013058] env[68040]: _type = "Task" [ 1497.013058] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.023491] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200292, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.522707] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200292, 'name': CreateVM_Task, 'duration_secs': 0.248573} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.523276] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1497.523701] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1497.523858] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1497.524171] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1497.524405] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db198e58-3b81-47cb-ba36-d616fc952d0c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.530088] env[68040]: DEBUG oslo_vmware.api [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Waiting for the task: (returnval){ [ 1497.530088] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52d4efa4-6dd8-95ec-6787-65bcef2451a9" [ 1497.530088] env[68040]: _type = "Task" [ 1497.530088] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.535697] env[68040]: DEBUG oslo_vmware.api [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52d4efa4-6dd8-95ec-6787-65bcef2451a9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.039757] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1498.040040] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1498.040256] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1506.019099] env[68040]: DEBUG oslo_concurrency.lockutils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquiring lock "2e44ead1-4676-4d9b-bbae-5082f505fc8b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.019541] env[68040]: DEBUG oslo_concurrency.lockutils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "2e44ead1-4676-4d9b-bbae-5082f505fc8b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.983807] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1510.983584] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1510.983854] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1510.996996] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1510.997282] env[68040]: DEBUG oslo_concurrency.lockutils [None
req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1510.997454] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1510.997615] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1510.998793] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd23fdcd-98a4-4828-bf07-968676b6df77 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.007503] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd00c45-c752-44a0-ae92-8aef99497af2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.021129] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083cabdd-c085-41a2-a987-e56716784a19 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.027052] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4798f84-3b64-498d-9385-2e2341c91f40 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.056191] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180989MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1511.056346] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.056544] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1511.198322] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1511.198488] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1511.198620] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e92b662c-b458-49d8-ac2a-00ae6046a11b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1511.198743] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 57cd94c2-aec3-427e-9b9f-a444fe291974 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1511.198864] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 87a7851e-d6fe-481a-8abb-5732e281cb64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1511.198982] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c581d685-7ea0-41f8-b911-ff1dce1b46c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1511.199114] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4a08d3e3-5e84-4f34-b418-2c18eadbef25 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1511.199232] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 268b5613-b132-49ed-a45b-bc88132177cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1511.199345] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4dfa01f8-53a0-4ee4-9b00-93017144ea0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1511.199459] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1511.211043] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f9d3a788-4f1b-46f7-83ab-dd6884f68d2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1511.223246] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 83475c46-38de-4918-91b4-b53dcf3ead77 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1511.233055] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 6541b54b-214d-432c-8ae6-5de4ed99390f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1511.242510] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d8f18a76-588b-4329-a167-2a571f82455f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1511.251591] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 285249df-f5b4-4a68-89fe-9281fe1573e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1511.260572] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d17db434-040f-4859-913e-bfd658be14b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1511.270313] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1511.279812] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 221a5bbe-7168-4f5c-ab49-8a149545655f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1511.289242] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 031481de-d52f-4f3f-80e5-0d0d6803d624 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1511.298602] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 2e44ead1-4676-4d9b-bbae-5082f505fc8b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1511.298835] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1511.298987] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1511.511457] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-713ce9b1-c45d-4906-8e42-4633cc7bd423 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.519009] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a5445a7-9af7-4825-8feb-aaed232cd9d6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.547883] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81356f25-9412-4ba0-adf0-ab0dd193c0ef {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.554606] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1db8bf-f2f5-4cab-8515-4ff6540c3e7d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.567653] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1511.575834] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1511.591834] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1511.591988] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.535s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.173840] env[68040]: DEBUG oslo_concurrency.lockutils [None 
req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Acquiring lock "f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1512.591949] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1512.984469] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1512.984704] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Cleaning up deleted instances {{(pid=68040) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1512.997453] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] There are 0 instances to clean {{(pid=68040) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1513.997395] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1514.985061] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1514.985211] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1514.985264] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1515.007241] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1515.007549] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1515.007549] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Skipping network cache update for instance because it is Building.
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1515.007628] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1515.007755] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1515.007900] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1515.008203] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1515.008357] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1515.008487] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1515.008613] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1515.008739] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1515.009206] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1515.983520] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1515.983694] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1516.980632] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1516.983535] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1516.983773] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Cleaning up deleted instances with incomplete migration {{(pid=68040) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1523.300112] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ab7bc51-6f8f-43ee-a690-498424423e90 tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Acquiring lock "f89a378a-376a-48d0-a01b-75c5bb4d8cd9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.300455] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ab7bc51-6f8f-43ee-a690-498424423e90 tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Lock "f89a378a-376a-48d0-a01b-75c5bb4d8cd9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.984058] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1542.016128] env[68040]: WARNING oslo_vmware.rw_handles [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1542.016128] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1542.016128] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1542.016128] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1542.016128] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1542.016128] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 1542.016128] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1542.016128] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1542.016128] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1542.016128] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1542.016128] env[68040]:
ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1542.016128] env[68040]: ERROR oslo_vmware.rw_handles [ 1542.016729] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/9067f534-7f58-4b55-be81-23d77e3fbc83/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1542.018834] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1542.019135] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Copying Virtual Disk [datastore2] vmware_temp/9067f534-7f58-4b55-be81-23d77e3fbc83/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/9067f534-7f58-4b55-be81-23d77e3fbc83/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1542.019430] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f6bfffc-bd62-4a59-a586-2634a6d82c88 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.027588] env[68040]: DEBUG oslo_vmware.api [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for the task: (returnval){ [ 1542.027588] env[68040]: value = "task-3200293" [ 1542.027588] env[68040]: _type = "Task" [ 1542.027588] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.035347] env[68040]: DEBUG oslo_vmware.api [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Task: {'id': task-3200293, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.537761] env[68040]: DEBUG oslo_vmware.exceptions [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Fault InvalidArgument not matched. 
{{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1542.538056] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1542.538643] env[68040]: ERROR nova.compute.manager [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1542.538643] env[68040]: Faults: ['InvalidArgument'] [ 1542.538643] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Traceback (most recent call last): [ 1542.538643] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1542.538643] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] yield resources [ 1542.538643] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1542.538643] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] self.driver.spawn(context, instance, image_meta, [ 1542.538643] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1542.538643] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1542.538643] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1542.538643] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] self._fetch_image_if_missing(context, vi) [ 1542.538643] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1542.539058] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] image_cache(vi, tmp_image_ds_loc) [ 1542.539058] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1542.539058] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] vm_util.copy_virtual_disk( [ 1542.539058] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1542.539058] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] session._wait_for_task(vmdk_copy_task) [ 1542.539058] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 1542.539058] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] return self.wait_for_task(task_ref) [ 1542.539058] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1542.539058] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] return evt.wait() [ 1542.539058] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1542.539058] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] result = hub.switch() [ 1542.539058] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1542.539058] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] return self.greenlet.switch() [ 1542.539495] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1542.539495] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] self.f(*self.args, **self.kw) [ 1542.539495] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1542.539495] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] raise exceptions.translate_fault(task_info.error) [ 1542.539495] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1542.539495] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Faults: ['InvalidArgument'] [ 1542.539495] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] [ 1542.539495] env[68040]: INFO nova.compute.manager [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Terminating instance [ 1542.540570] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1542.540805] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1542.541422] env[68040]: DEBUG nova.compute.manager [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Start 
destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1542.541616] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1542.541869] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ed91b5c-213e-423b-bba2-ad2c8c7d9d44 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.544107] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc60846a-ec52-4cf9-8c1a-80c5a65f1240 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.552106] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1542.552340] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1e0bfb7c-22d4-4ff2-88de-2d8dca9bd637 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.554516] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1542.554692] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1542.555673] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2a9cca9-44a3-46fd-8c90-0b44bfd358d6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.560171] env[68040]: DEBUG oslo_vmware.api [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Waiting for the task: (returnval){ [ 1542.560171] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52879bec-29ff-d1c8-df44-e60f0583ca35" [ 1542.560171] env[68040]: _type = "Task" [ 1542.560171] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.574449] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1542.574694] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Creating directory with path [datastore2] vmware_temp/0b76fcb3-9e9d-46e8-84c0-43540384f1bf/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1542.574912] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0302f156-94d9-4ec3-ad93-e2ae629df63c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.586186] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Created directory with path [datastore2] vmware_temp/0b76fcb3-9e9d-46e8-84c0-43540384f1bf/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1542.586413] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Fetch image to [datastore2] vmware_temp/0b76fcb3-9e9d-46e8-84c0-43540384f1bf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1542.586617] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/0b76fcb3-9e9d-46e8-84c0-43540384f1bf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1542.587397] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6857b38-4a27-43d9-8707-915c7d624b8f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.594071] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239a3ea0-ca01-4653-b8ce-ecfb9469f279 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.603089] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c628c9-c1c4-4792-ae61-e3679d4a5d6f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.635914] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda7d43d-3883-45e6-8a84-0ae8afeacf3d {{(pid=68040) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.638573] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1542.638791] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1542.638969] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Deleting the datastore file [datastore2] e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1542.639221] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cc513e73-db04-4e69-9bcb-faa785c117ca {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.645435] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1ce5e34e-9038-4579-91b5-ec205e1cbd48 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.647172] env[68040]: DEBUG oslo_vmware.api [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for the task: (returnval){ [ 1542.647172] env[68040]: value = "task-3200295" [ 1542.647172] env[68040]: _type = "Task" [ 1542.647172] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.670527] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1542.724808] env[68040]: DEBUG oslo_vmware.rw_handles [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0b76fcb3-9e9d-46e8-84c0-43540384f1bf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1542.786443] env[68040]: DEBUG oslo_vmware.rw_handles [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Completed reading data from the image iterator. 
{{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1542.786650] env[68040]: DEBUG oslo_vmware.rw_handles [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0b76fcb3-9e9d-46e8-84c0-43540384f1bf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1543.157253] env[68040]: DEBUG oslo_vmware.api [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Task: {'id': task-3200295, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074164} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.157507] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1543.157686] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1543.157860] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1543.158051] env[68040]: INFO nova.compute.manager [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Took 0.62 seconds to destroy the instance on the hypervisor. 
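
The records above show the poll-and-wait pattern oslo.vmware applies to every vCenter task in this log: the caller blocks in wait_for_task while a looping call re-reads the task state (a 0% progress record, then "completed successfully" with a measured duration_secs), and a fault in the task info is raised back to the caller. A minimal sketch of that pattern in Python, with a hypothetical get_task_info() callable standing in for the real per-poll PropertyCollector round-trip:

    import time

    class TaskFailed(Exception):
        """Raised when the polled task reports an error state."""

    def wait_for_task(get_task_info, interval=0.5):
        # get_task_info is a stand-in for the per-poll server call;
        # assume it returns a dict such as
        # {'state': 'running', 'progress': 40} or {'state': 'success'}.
        start = time.monotonic()
        while True:
            info = get_task_info()
            if info['state'] == 'success':
                # Matches the log's "completed successfully" records,
                # which also carry the measured duration_secs.
                info['duration_secs'] = round(time.monotonic() - start, 6)
                return info
            if info['state'] == 'error':
                # oslo.vmware instead raises the translated fault here.
                raise TaskFailed(info.get('error', 'unknown fault'))
            time.sleep(interval)  # queued/running: poll again

    # Demo: one in-progress poll, then success.
    states = iter([{'state': 'running', 'progress': 0}, {'state': 'success'}])
    info = wait_for_task(lambda: next(states), interval=0.01)

Task task-3200295 (DeleteDatastoreFile_Task) above follows this shape, finishing with duration_secs 0.074164; task-3200293 (CopyVirtualDisk_Task) takes the error branch instead, which leads to the fault handling below.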
[ 1543.160295] env[68040]: DEBUG nova.compute.claims [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1543.160472] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.160717] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1543.368078] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4705354-84c6-4157-8657-bb3c31db407d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.376485] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90aa996e-0cb2-413a-9cf3-f1fa6a394755 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.404870] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c265e0-ed52-44be-ac8d-b87727a9961f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.411783] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278f2580-97c6-40ff-90b0-6182e142533c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.425449] env[68040]: DEBUG nova.compute.provider_tree [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1543.434078] env[68040]: DEBUG nova.scheduler.client.report [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1543.447602] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.287s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1543.448132] env[68040]: ERROR nova.compute.manager [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1543.448132] env[68040]: Faults: ['InvalidArgument'] [ 1543.448132] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Traceback (most recent call last): [ 1543.448132] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1543.448132] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] self.driver.spawn(context, instance, image_meta, [ 1543.448132] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1543.448132] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1543.448132] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1543.448132] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] self._fetch_image_if_missing(context, vi) [ 1543.448132] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1543.448132] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] image_cache(vi, tmp_image_ds_loc) [ 1543.448132] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1543.448582] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] vm_util.copy_virtual_disk( [ 1543.448582] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1543.448582] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] session._wait_for_task(vmdk_copy_task) [ 1543.448582] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1543.448582] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] return self.wait_for_task(task_ref) [ 1543.448582] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1543.448582] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] return evt.wait() [ 1543.448582] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1543.448582] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] result = hub.switch() [ 1543.448582] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1543.448582] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] return self.greenlet.switch() [ 1543.448582] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1543.448582] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] self.f(*self.args, **self.kw) [ 1543.448938] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1543.448938] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] raise exceptions.translate_fault(task_info.error) [ 1543.448938] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1543.448938] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Faults: ['InvalidArgument'] [ 1543.448938] env[68040]: ERROR nova.compute.manager [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] [ 1543.448938] env[68040]: DEBUG nova.compute.utils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1543.450151] env[68040]: DEBUG nova.compute.manager [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Build of instance e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd was re-scheduled: A specified parameter was not correct: fileType [ 1543.450151] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1543.450524] env[68040]: DEBUG nova.compute.manager [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1543.450736] env[68040]: DEBUG nova.compute.manager [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1543.450920] env[68040]: DEBUG nova.compute.manager [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1543.451102] env[68040]: DEBUG nova.network.neutron [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1543.791661] env[68040]: DEBUG nova.network.neutron [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1543.804332] env[68040]: INFO nova.compute.manager [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Took 0.35 seconds to deallocate network for instance. [ 1543.891212] env[68040]: INFO nova.scheduler.client.report [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Deleted allocations for instance e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd [ 1543.913839] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ebaa0e70-9507-4fbe-a0e2-ecbae677d93a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 640.659s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1543.914950] env[68040]: DEBUG oslo_concurrency.lockutils [None req-084ca5b5-a407-4835-a64b-021154e15ac9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 444.304s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1543.915185] env[68040]: DEBUG oslo_concurrency.lockutils [None req-084ca5b5-a407-4835-a64b-021154e15ac9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.915395] env[68040]: DEBUG oslo_concurrency.lockutils [None req-084ca5b5-a407-4835-a64b-021154e15ac9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1543.915563] env[68040]: DEBUG oslo_concurrency.lockutils [None req-084ca5b5-a407-4835-a64b-021154e15ac9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1543.918756] env[68040]: INFO nova.compute.manager [None req-084ca5b5-a407-4835-a64b-021154e15ac9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Terminating instance [ 1543.920216] env[68040]: DEBUG nova.compute.manager [None req-084ca5b5-a407-4835-a64b-021154e15ac9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1543.920216] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-084ca5b5-a407-4835-a64b-021154e15ac9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1543.920704] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6d86f895-992c-44a2-865f-fcdb53179d51 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.930330] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa743d7d-22c0-4f11-8d30-5a55e33b58fe {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.942814] env[68040]: DEBUG nova.compute.manager [None req-bf318a9d-6437-4000-a9fe-936780bbf9c8 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f9d3a788-4f1b-46f7-83ab-dd6884f68d2e] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1543.964386] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-084ca5b5-a407-4835-a64b-021154e15ac9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd could not be found. [ 1543.964480] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-084ca5b5-a407-4835-a64b-021154e15ac9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1543.964585] env[68040]: INFO nova.compute.manager [None req-084ca5b5-a407-4835-a64b-021154e15ac9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1543.964832] env[68040]: DEBUG oslo.service.loopingcall [None req-084ca5b5-a407-4835-a64b-021154e15ac9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1543.966179] env[68040]: DEBUG nova.compute.manager [-] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1543.966179] env[68040]: DEBUG nova.network.neutron [-] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1543.972280] env[68040]: DEBUG nova.compute.manager [None req-bf318a9d-6437-4000-a9fe-936780bbf9c8 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f9d3a788-4f1b-46f7-83ab-dd6884f68d2e] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1543.993040] env[68040]: DEBUG nova.network.neutron [-] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1543.995092] env[68040]: DEBUG oslo_concurrency.lockutils [None req-bf318a9d-6437-4000-a9fe-936780bbf9c8 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Lock "f9d3a788-4f1b-46f7-83ab-dd6884f68d2e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 228.285s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.001869] env[68040]: INFO nova.compute.manager [-] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] Took 0.04 seconds to deallocate network for instance. [ 1544.009084] env[68040]: DEBUG nova.compute.manager [None req-1f92486e-a8ce-4c1c-a14d-d0d1363460a8 tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 83475c46-38de-4918-91b4-b53dcf3ead77] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1544.032429] env[68040]: DEBUG nova.compute.manager [None req-1f92486e-a8ce-4c1c-a14d-d0d1363460a8 tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 83475c46-38de-4918-91b4-b53dcf3ead77] Instance disappeared before build. 
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1544.073132] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1f92486e-a8ce-4c1c-a14d-d0d1363460a8 tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Lock "83475c46-38de-4918-91b4-b53dcf3ead77" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 219.264s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.083851] env[68040]: DEBUG nova.compute.manager [None req-b569d8ef-a337-4d3d-8463-ef8abdd9daf1 tempest-ListServersNegativeTestJSON-595429082 tempest-ListServersNegativeTestJSON-595429082-project-member] [instance: 6541b54b-214d-432c-8ae6-5de4ed99390f] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1544.112483] env[68040]: DEBUG nova.compute.manager [None req-b569d8ef-a337-4d3d-8463-ef8abdd9daf1 tempest-ListServersNegativeTestJSON-595429082 tempest-ListServersNegativeTestJSON-595429082-project-member] [instance: 6541b54b-214d-432c-8ae6-5de4ed99390f] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1544.132615] env[68040]: DEBUG oslo_concurrency.lockutils [None req-084ca5b5-a407-4835-a64b-021154e15ac9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.218s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.133512] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 332.944s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.133691] env[68040]: INFO nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1544.133867] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "e94b6f7c-1f37-4801-9abc-42b6f2c7b1bd" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.136540] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b569d8ef-a337-4d3d-8463-ef8abdd9daf1 tempest-ListServersNegativeTestJSON-595429082 tempest-ListServersNegativeTestJSON-595429082-project-member] Lock "6541b54b-214d-432c-8ae6-5de4ed99390f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 212.580s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.145027] env[68040]: DEBUG nova.compute.manager [None req-b569d8ef-a337-4d3d-8463-ef8abdd9daf1 tempest-ListServersNegativeTestJSON-595429082 tempest-ListServersNegativeTestJSON-595429082-project-member] [instance: d8f18a76-588b-4329-a167-2a571f82455f] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1544.168058] env[68040]: DEBUG nova.compute.manager [None req-b569d8ef-a337-4d3d-8463-ef8abdd9daf1 tempest-ListServersNegativeTestJSON-595429082 tempest-ListServersNegativeTestJSON-595429082-project-member] [instance: d8f18a76-588b-4329-a167-2a571f82455f] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1544.192417] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b569d8ef-a337-4d3d-8463-ef8abdd9daf1 tempest-ListServersNegativeTestJSON-595429082 tempest-ListServersNegativeTestJSON-595429082-project-member] Lock "d8f18a76-588b-4329-a167-2a571f82455f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 212.612s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.201928] env[68040]: DEBUG nova.compute.manager [None req-b569d8ef-a337-4d3d-8463-ef8abdd9daf1 tempest-ListServersNegativeTestJSON-595429082 tempest-ListServersNegativeTestJSON-595429082-project-member] [instance: 285249df-f5b4-4a68-89fe-9281fe1573e5] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1544.228418] env[68040]: DEBUG nova.compute.manager [None req-b569d8ef-a337-4d3d-8463-ef8abdd9daf1 tempest-ListServersNegativeTestJSON-595429082 tempest-ListServersNegativeTestJSON-595429082-project-member] [instance: 285249df-f5b4-4a68-89fe-9281fe1573e5] Instance disappeared before build. 
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1544.249403] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b569d8ef-a337-4d3d-8463-ef8abdd9daf1 tempest-ListServersNegativeTestJSON-595429082 tempest-ListServersNegativeTestJSON-595429082-project-member] Lock "285249df-f5b4-4a68-89fe-9281fe1573e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 212.637s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.262068] env[68040]: DEBUG nova.compute.manager [None req-951941ec-b7a2-45a6-a027-60dc5719924c tempest-ServersListShow296Test-218947232 tempest-ServersListShow296Test-218947232-project-member] [instance: d17db434-040f-4859-913e-bfd658be14b3] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1544.285850] env[68040]: DEBUG nova.compute.manager [None req-951941ec-b7a2-45a6-a027-60dc5719924c tempest-ServersListShow296Test-218947232 tempest-ServersListShow296Test-218947232-project-member] [instance: d17db434-040f-4859-913e-bfd658be14b3] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1544.309306] env[68040]: DEBUG oslo_concurrency.lockutils [None req-951941ec-b7a2-45a6-a027-60dc5719924c tempest-ServersListShow296Test-218947232 tempest-ServersListShow296Test-218947232-project-member] Lock "d17db434-040f-4859-913e-bfd658be14b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 198.356s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.319135] env[68040]: DEBUG nova.compute.manager [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Starting instance... 
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1544.376573] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.376895] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.378394] env[68040]: INFO nova.compute.claims [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1544.569927] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce850a0-3234-4e90-91f3-15495dbefb51 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.577606] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03142999-1a56-4786-8b07-ef129f4e287d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.606303] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f87ec6d-67ba-426f-9a87-54fb51380977 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.612690] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7578a2bb-833e-41cb-88a0-2c947acc25c3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.625067] env[68040]: DEBUG nova.compute.provider_tree [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1544.633545] env[68040]: DEBUG nova.scheduler.client.report [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1544.647050] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 
tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.270s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.647154] env[68040]: DEBUG nova.compute.manager [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1544.680772] env[68040]: DEBUG nova.compute.utils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1544.682268] env[68040]: DEBUG nova.compute.manager [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1544.682480] env[68040]: DEBUG nova.network.neutron [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1544.690358] env[68040]: DEBUG nova.compute.manager [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1544.751459] env[68040]: DEBUG nova.compute.manager [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Start spawning the instance on the hypervisor. 
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1544.762994] env[68040]: DEBUG nova.policy [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35623be94b514118854725c498805238', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3619283094694fa697b4e1993598c770', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 1544.778626] env[68040]: DEBUG nova.virt.hardware [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=<?>,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-27T05:59:34Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1544.778910] env[68040]: DEBUG nova.virt.hardware [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1544.779088] env[68040]: DEBUG nova.virt.hardware [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1544.779279] env[68040]: DEBUG nova.virt.hardware [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1544.779427] env[68040]: DEBUG nova.virt.hardware [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1544.779575] env[68040]: DEBUG nova.virt.hardware [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1544.779783] env[68040]: DEBUG nova.virt.hardware [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 
tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1544.779942] env[68040]: DEBUG nova.virt.hardware [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1544.780127] env[68040]: DEBUG nova.virt.hardware [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1544.780295] env[68040]: DEBUG nova.virt.hardware [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1544.780483] env[68040]: DEBUG nova.virt.hardware [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1544.781387] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450f2a15-93c4-4c37-9f3a-674bb783337f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.789386] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0294d022-0d61-46f2-b858-ed235f3cf731 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.128413] env[68040]: DEBUG nova.network.neutron [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Successfully created port: e28aba84-77fb-4478-b36d-556a599d9f8c {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1545.843857] env[68040]: DEBUG nova.compute.manager [req-d858bfe4-7610-455f-a728-92100847407d req-175d043d-2b0e-47c9-a796-e754ff00c280 service nova] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Received event network-vif-plugged-e28aba84-77fb-4478-b36d-556a599d9f8c {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1545.844145] env[68040]: DEBUG oslo_concurrency.lockutils [req-d858bfe4-7610-455f-a728-92100847407d req-175d043d-2b0e-47c9-a796-e754ff00c280 service nova] Acquiring lock "c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1545.844335] env[68040]: DEBUG oslo_concurrency.lockutils [req-d858bfe4-7610-455f-a728-92100847407d req-175d043d-2b0e-47c9-a796-e754ff00c280 service nova] Lock "c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.844510] env[68040]: DEBUG oslo_concurrency.lockutils [req-d858bfe4-7610-455f-a728-92100847407d req-175d043d-2b0e-47c9-a796-e754ff00c280 service nova] Lock "c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.844680] env[68040]: DEBUG nova.compute.manager [req-d858bfe4-7610-455f-a728-92100847407d req-175d043d-2b0e-47c9-a796-e754ff00c280 service nova] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] No waiting events found dispatching network-vif-plugged-e28aba84-77fb-4478-b36d-556a599d9f8c {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1545.844848] env[68040]: WARNING nova.compute.manager [req-d858bfe4-7610-455f-a728-92100847407d req-175d043d-2b0e-47c9-a796-e754ff00c280 service nova] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Received unexpected event network-vif-plugged-e28aba84-77fb-4478-b36d-556a599d9f8c for instance with vm_state building and task_state spawning. [ 1545.894484] env[68040]: DEBUG nova.network.neutron [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Successfully updated port: e28aba84-77fb-4478-b36d-556a599d9f8c {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1545.915057] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Acquiring lock "refresh_cache-c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1545.915269] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Acquired lock "refresh_cache-c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.915456] env[68040]: DEBUG nova.network.neutron [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1546.013579] env[68040]: DEBUG nova.network.neutron [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Instance cache missing network info. 
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1546.490099] env[68040]: DEBUG nova.network.neutron [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Updating instance_info_cache with network_info: [{"id": "e28aba84-77fb-4478-b36d-556a599d9f8c", "address": "fa:16:3e:2e:ab:60", "network": {"id": "df503edd-1324-4c18-a3a1-9beefaa3cc82", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1845064548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3619283094694fa697b4e1993598c770", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape28aba84-77", "ovs_interfaceid": "e28aba84-77fb-4478-b36d-556a599d9f8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.501664] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Releasing lock "refresh_cache-c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.501664] env[68040]: DEBUG nova.compute.manager [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Instance network_info: |[{"id": "e28aba84-77fb-4478-b36d-556a599d9f8c", "address": "fa:16:3e:2e:ab:60", "network": {"id": "df503edd-1324-4c18-a3a1-9beefaa3cc82", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1845064548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3619283094694fa697b4e1993598c770", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape28aba84-77", "ovs_interfaceid": "e28aba84-77fb-4478-b36d-556a599d9f8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} 
[ 1546.501888] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:ab:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f2c424c9-6446-4b2a-af8c-4d9c29117c39', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e28aba84-77fb-4478-b36d-556a599d9f8c', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1546.509053] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Creating folder: Project (3619283094694fa697b4e1993598c770). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1546.509544] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-520f1fc6-173e-42ab-8643-31e68fe974c7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.520304] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Created folder: Project (3619283094694fa697b4e1993598c770) in parent group-v639956. [ 1546.520497] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Creating folder: Instances. Parent ref: group-v640043. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1546.520736] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-abc2d377-e836-4f73-8e62-9af9cd9d78a3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.528436] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Created folder: Instances in parent group-v640043. [ 1546.528654] env[68040]: DEBUG oslo.service.loopingcall [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1546.528830] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1546.529028] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-14d7f1f3-553c-4f87-8b8c-841fc4e2cb62 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.546202] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1546.546202] env[68040]: value = "task-3200298" [ 1546.546202] env[68040]: _type = "Task" [ 1546.546202] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.553111] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200298, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.055878] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200298, 'name': CreateVM_Task, 'duration_secs': 0.296107} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.056152] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1547.056726] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1547.056893] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.057235] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1547.057480] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-060de9b5-81fb-406d-9ae6-d82a044dd9a1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.061594] env[68040]: DEBUG oslo_vmware.api [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Waiting for the task: (returnval){ [ 1547.061594] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5289e22e-f217-6f02-02e7-31835cbd2dd1" [ 1547.061594] env[68040]: _type = "Task" [ 1547.061594] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.068993] env[68040]: DEBUG oslo_vmware.api [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5289e22e-f217-6f02-02e7-31835cbd2dd1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.571631] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.571903] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1547.572134] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1547.953828] env[68040]: DEBUG nova.compute.manager [req-ed3d39ff-f8dd-4ebf-9741-de624b052f15 req-9a87ece4-5817-4709-820d-4510a9c1c24c service nova] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Received event network-changed-e28aba84-77fb-4478-b36d-556a599d9f8c {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1547.954083] env[68040]: DEBUG nova.compute.manager [req-ed3d39ff-f8dd-4ebf-9741-de624b052f15 req-9a87ece4-5817-4709-820d-4510a9c1c24c service nova] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Refreshing instance network info cache due to event network-changed-e28aba84-77fb-4478-b36d-556a599d9f8c. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1547.954265] env[68040]: DEBUG oslo_concurrency.lockutils [req-ed3d39ff-f8dd-4ebf-9741-de624b052f15 req-9a87ece4-5817-4709-820d-4510a9c1c24c service nova] Acquiring lock "refresh_cache-c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1547.954366] env[68040]: DEBUG oslo_concurrency.lockutils [req-ed3d39ff-f8dd-4ebf-9741-de624b052f15 req-9a87ece4-5817-4709-820d-4510a9c1c24c service nova] Acquired lock "refresh_cache-c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.954530] env[68040]: DEBUG nova.network.neutron [req-ed3d39ff-f8dd-4ebf-9741-de624b052f15 req-9a87ece4-5817-4709-820d-4510a9c1c24c service nova] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Refreshing network info cache for port e28aba84-77fb-4478-b36d-556a599d9f8c {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1548.291480] env[68040]: DEBUG nova.network.neutron [req-ed3d39ff-f8dd-4ebf-9741-de624b052f15 req-9a87ece4-5817-4709-820d-4510a9c1c24c service nova] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Updated VIF entry in instance network info cache for port e28aba84-77fb-4478-b36d-556a599d9f8c. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1548.291864] env[68040]: DEBUG nova.network.neutron [req-ed3d39ff-f8dd-4ebf-9741-de624b052f15 req-9a87ece4-5817-4709-820d-4510a9c1c24c service nova] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Updating instance_info_cache with network_info: [{"id": "e28aba84-77fb-4478-b36d-556a599d9f8c", "address": "fa:16:3e:2e:ab:60", "network": {"id": "df503edd-1324-4c18-a3a1-9beefaa3cc82", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1845064548-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3619283094694fa697b4e1993598c770", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f2c424c9-6446-4b2a-af8c-4d9c29117c39", "external-id": "nsx-vlan-transportzone-437", "segmentation_id": 437, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape28aba84-77", "ovs_interfaceid": "e28aba84-77fb-4478-b36d-556a599d9f8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1548.300981] env[68040]: DEBUG oslo_concurrency.lockutils [req-ed3d39ff-f8dd-4ebf-9741-de624b052f15 req-9a87ece4-5817-4709-820d-4510a9c1c24c service nova] Releasing lock "refresh_cache-c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1569.993346] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1570.983918] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1572.984356] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1572.997311] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.997545] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.997805] env[68040]: DEBUG 
oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.997978] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1573.001585] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d470d131-6c7a-4987-9814-e7a1e31ce48f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.009252] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa77e6b-32bd-4a74-a58f-fec6f9a683ba {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.023779] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-738eb652-d950-4b02-a0aa-bc0505ac6200 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.029926] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74923bb1-ffcf-43e5-831e-7b0704c043e4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.059443] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180988MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1573.059588] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.059777] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.133165] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1573.133343] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e92b662c-b458-49d8-ac2a-00ae6046a11b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1573.133474] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 57cd94c2-aec3-427e-9b9f-a444fe291974 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1573.133598] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 87a7851e-d6fe-481a-8abb-5732e281cb64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1573.133755] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c581d685-7ea0-41f8-b911-ff1dce1b46c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1573.133901] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4a08d3e3-5e84-4f34-b418-2c18eadbef25 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1573.134043] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 268b5613-b132-49ed-a45b-bc88132177cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1573.134179] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4dfa01f8-53a0-4ee4-9b00-93017144ea0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1573.134300] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1573.134418] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1573.144773] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 221a5bbe-7168-4f5c-ab49-8a149545655f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1573.154824] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 031481de-d52f-4f3f-80e5-0d0d6803d624 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1573.164151] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 2e44ead1-4676-4d9b-bbae-5082f505fc8b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1573.174571] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f89a378a-376a-48d0-a01b-75c5bb4d8cd9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1573.174793] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1573.174941] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1573.323334] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a545f89d-c3c4-41e4-9645-026178924835 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.330581] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b08d6fc4-a4b5-4172-a159-8692f45063e6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.359052] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e909da-73f4-4cc8-8839-be39ae99bde8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.365522] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db33d0c-c494-4b8b-b722-d9b22a41452e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.377841] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1573.386196] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1573.399169] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1573.399349] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.340s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.399543] env[68040]: DEBUG oslo_service.periodic_task [None 
req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1574.399800] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1576.984045] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1576.984436] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1576.984436] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1577.006146] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1577.006313] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1577.006418] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1577.006546] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1577.006670] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1577.006794] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1577.006916] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Skipping network cache update for instance because it is Building. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1577.007051] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1577.007178] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1577.007300] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1577.007421] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1577.007879] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1577.008085] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1577.008221] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1579.003881] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1581.980248] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1588.171018] env[68040]: WARNING oslo_vmware.rw_handles [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1588.171018] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1588.171018] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1588.171018] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1588.171018] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1588.171018] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 1588.171018] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1588.171018] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1588.171018] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1588.171018] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1588.171018] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1588.171018] env[68040]: ERROR oslo_vmware.rw_handles [ 1588.171018] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/0b76fcb3-9e9d-46e8-84c0-43540384f1bf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1588.171784] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1588.171784] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Copying Virtual Disk [datastore2] vmware_temp/0b76fcb3-9e9d-46e8-84c0-43540384f1bf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] 
vmware_temp/0b76fcb3-9e9d-46e8-84c0-43540384f1bf/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1588.171784] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-30d2ece4-2621-4bb6-9022-20d22d782015 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.178433] env[68040]: DEBUG oslo_vmware.api [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Waiting for the task: (returnval){ [ 1588.178433] env[68040]: value = "task-3200299" [ 1588.178433] env[68040]: _type = "Task" [ 1588.178433] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.186384] env[68040]: DEBUG oslo_vmware.api [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Task: {'id': task-3200299, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.689667] env[68040]: DEBUG oslo_vmware.exceptions [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1588.689954] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1588.690543] env[68040]: ERROR nova.compute.manager [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1588.690543] env[68040]: Faults: ['InvalidArgument'] [ 1588.690543] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Traceback (most recent call last): [ 1588.690543] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1588.690543] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] yield resources [ 1588.690543] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1588.690543] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] self.driver.spawn(context, instance, image_meta, [ 1588.690543] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1588.690543] env[68040]: ERROR nova.compute.manager [instance: 
b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1588.690543] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1588.690543] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] self._fetch_image_if_missing(context, vi) [ 1588.690543] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1588.691084] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] image_cache(vi, tmp_image_ds_loc) [ 1588.691084] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1588.691084] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] vm_util.copy_virtual_disk( [ 1588.691084] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1588.691084] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] session._wait_for_task(vmdk_copy_task) [ 1588.691084] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1588.691084] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] return self.wait_for_task(task_ref) [ 1588.691084] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1588.691084] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] return evt.wait() [ 1588.691084] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1588.691084] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] result = hub.switch() [ 1588.691084] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1588.691084] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] return self.greenlet.switch() [ 1588.691523] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1588.691523] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] self.f(*self.args, **self.kw) [ 1588.691523] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1588.691523] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] raise exceptions.translate_fault(task_info.error) [ 1588.691523] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1588.691523] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Faults: ['InvalidArgument'] [ 1588.691523] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] [ 1588.691523] env[68040]: INFO nova.compute.manager [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Terminating instance [ 1588.692687] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1588.692900] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1588.693168] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d62a3142-25da-48a4-b1a9-23deab97c058 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.695566] env[68040]: DEBUG nova.compute.manager [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Start destroying the instance on the hypervisor. 
{{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1588.695763] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1588.696559] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fab2349-d8a0-4d83-bf8d-b1d4e8e0d484 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.704445] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1588.704719] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d428589-adf3-487e-b529-119659b314c9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.707409] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1588.707589] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1588.708659] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6168d68-db00-4b05-ad81-538fb1b653a6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.714303] env[68040]: DEBUG oslo_vmware.api [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Waiting for the task: (returnval){ [ 1588.714303] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52f7b792-c403-da1e-3cb2-7c6b8429c640" [ 1588.714303] env[68040]: _type = "Task" [ 1588.714303] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.722580] env[68040]: DEBUG oslo_vmware.api [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52f7b792-c403-da1e-3cb2-7c6b8429c640, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.781069] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1588.781415] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1588.781685] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Deleting the datastore file [datastore2] b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1588.781994] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ecf48f2f-681d-4caf-a787-a42922274fc2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.788229] env[68040]: DEBUG oslo_vmware.api [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Waiting for the task: (returnval){ [ 1588.788229] env[68040]: value = "task-3200301" [ 1588.788229] env[68040]: _type = "Task" [ 1588.788229] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.796293] env[68040]: DEBUG oslo_vmware.api [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Task: {'id': task-3200301, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.228563] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1589.229019] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Creating directory with path [datastore2] vmware_temp/92186b1a-bee4-46c5-b2d6-b72aa9115dfd/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1589.229367] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d315fce5-976b-40c1-8e3a-4a027850246d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.245113] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Created directory with path [datastore2] vmware_temp/92186b1a-bee4-46c5-b2d6-b72aa9115dfd/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1589.245458] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Fetch image to [datastore2] vmware_temp/92186b1a-bee4-46c5-b2d6-b72aa9115dfd/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1589.245745] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/92186b1a-bee4-46c5-b2d6-b72aa9115dfd/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1589.246941] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a9ee0ae-645c-4f94-8dc5-27b49fdec737 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.256944] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6df08aa1-abeb-4a7c-b22d-225ac641baf1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.271717] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b680b53-e968-4bad-a0ac-2b426969c197 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.309410] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a3c35e4d-73fb-4768-a313-ac1d960bd6ad {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.316840] env[68040]: DEBUG oslo_vmware.api [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Task: {'id': task-3200301, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082183} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.318380] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1589.318581] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1589.318759] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1589.318938] env[68040]: INFO nova.compute.manager [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Took 0.62 seconds to destroy the instance on the hypervisor. 
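The CopyVirtualDisk_Task above fails with VimFaultException ("A specified parameter was not correct: fileType"), while the DeleteDatastoreFile_Task completes in ~0.08s; both go through the same wait/poll path that produces the "progress is N%" and "completed successfully" records. A minimal sketch of that pattern, assuming an established oslo.vmware session object and a vCenter task reference (both placeholders here); the exception class and its fault_list attribute are oslo.vmware's real API:

```python
# Hedged sketch of the task-wait pattern behind the records above.
# `session` stands in for an oslo_vmware.api.VMwareAPISession and
# `task_ref` for a vCenter task moref; neither is constructed here.
from oslo_vmware import exceptions as vexc

def run_vcenter_task(session, task_ref):
    try:
        # Polls the server-side task state (the "progress is N%" lines)
        # until it reaches 'success' or 'error'.
        return session.wait_for_task(task_ref)
    except vexc.VimFaultException as e:
        # fault_list carries the raw vCenter fault names, matching the
        # "Faults: ['InvalidArgument']" lines in the traceback above.
        print('task failed (faults=%s): %s' % (e.fault_list, e))
        raise
```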
[ 1589.321084] env[68040]: DEBUG nova.compute.claims [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1589.321260] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1589.321479] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1589.324640] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d14abd55-448a-4578-a52d-6d2bc0986568 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.349474] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1589.408178] env[68040]: DEBUG oslo_vmware.rw_handles [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/92186b1a-bee4-46c5-b2d6-b72aa9115dfd/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1589.469637] env[68040]: DEBUG oslo_vmware.rw_handles [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1589.469824] env[68040]: DEBUG oslo_vmware.rw_handles [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/92186b1a-bee4-46c5-b2d6-b72aa9115dfd/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1589.609699] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b47c5f2-7ed8-4713-bb4c-f0bd2cc78b80 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.617908] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1b8e29-8e60-4bc8-85ee-a3f37db2ecd3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.647314] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c0f04f0-2258-4438-9dfd-00acac487eff {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.655116] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482b4267-ca32-42c3-8156-6022bf252bac {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.671020] env[68040]: DEBUG nova.compute.provider_tree [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1589.679671] env[68040]: DEBUG nova.scheduler.client.report [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1589.693979] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.372s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1589.694525] env[68040]: ERROR nova.compute.manager [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1589.694525] env[68040]: Faults: ['InvalidArgument'] [ 1589.694525] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Traceback (most recent call last): [ 1589.694525] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1589.694525] env[68040]: ERROR 
nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] self.driver.spawn(context, instance, image_meta, [ 1589.694525] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1589.694525] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1589.694525] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1589.694525] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] self._fetch_image_if_missing(context, vi) [ 1589.694525] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1589.694525] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] image_cache(vi, tmp_image_ds_loc) [ 1589.694525] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1589.694898] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] vm_util.copy_virtual_disk( [ 1589.694898] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1589.694898] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] session._wait_for_task(vmdk_copy_task) [ 1589.694898] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1589.694898] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] return self.wait_for_task(task_ref) [ 1589.694898] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1589.694898] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] return evt.wait() [ 1589.694898] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1589.694898] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] result = hub.switch() [ 1589.694898] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1589.694898] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] return self.greenlet.switch() [ 1589.694898] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1589.694898] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] self.f(*self.args, **self.kw) [ 1589.695266] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1589.695266] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] raise exceptions.translate_fault(task_info.error) [ 1589.695266] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1589.695266] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Faults: ['InvalidArgument'] [ 1589.695266] env[68040]: ERROR nova.compute.manager [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] [ 1589.695266] env[68040]: DEBUG nova.compute.utils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1589.697082] env[68040]: DEBUG nova.compute.manager [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Build of instance b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77 was re-scheduled: A specified parameter was not correct: fileType [ 1589.697082] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1589.697183] env[68040]: DEBUG nova.compute.manager [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1589.697342] env[68040]: DEBUG nova.compute.manager [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1589.697513] env[68040]: DEBUG nova.compute.manager [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1589.697676] env[68040]: DEBUG nova.network.neutron [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1590.040041] env[68040]: DEBUG nova.network.neutron [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1590.054808] env[68040]: INFO nova.compute.manager [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Took 0.36 seconds to deallocate network for instance. [ 1590.151581] env[68040]: INFO nova.scheduler.client.report [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Deleted allocations for instance b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77 [ 1590.177151] env[68040]: DEBUG oslo_concurrency.lockutils [None req-d48d44a2-95cf-44b0-8b03-5d96c9ae6dc1 tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Lock "b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 533.659s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.178322] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 378.988s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.178515] env[68040]: INFO nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] During sync_power_state the instance has a pending task (spawning). Skip. 
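The lock records here (the build lock on b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77 held 533.659s, the power-state sync that waited 378.988s for it, and the terminate path just below that waited 337.232s) are emitted by oslo.concurrency's lock instrumentation. A minimal sketch, not Nova's actual code, of the API that produces the "Acquiring lock ... / acquired ... waited / released ... held" lines; the lock names mirror the log, the function body is hypothetical:

```python
# Sketch of the oslo.concurrency usage behind the acquire/release records.
# Both the decorator and the context-manager form are real oslo APIs.
from oslo_concurrency import lockutils

@lockutils.synchronized('b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77')
def do_terminate_instance():
    # Serialized per instance UUID: the terminate below waited 337.232s
    # because the build path held this same lock for 533.659s.
    pass

# Equivalent explicit form, as used for the "compute_resources" lock:
with lockutils.lock('compute_resources'):
    pass
```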
[ 1590.178692] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.179351] env[68040]: DEBUG oslo_concurrency.lockutils [None req-40c72093-5de6-4ea1-81fd-f1034c981b6f tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Lock "b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 337.232s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.179577] env[68040]: DEBUG oslo_concurrency.lockutils [None req-40c72093-5de6-4ea1-81fd-f1034c981b6f tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Acquiring lock "b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.179783] env[68040]: DEBUG oslo_concurrency.lockutils [None req-40c72093-5de6-4ea1-81fd-f1034c981b6f tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Lock "b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.179948] env[68040]: DEBUG oslo_concurrency.lockutils [None req-40c72093-5de6-4ea1-81fd-f1034c981b6f tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Lock "b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.181751] env[68040]: INFO nova.compute.manager [None req-40c72093-5de6-4ea1-81fd-f1034c981b6f tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Terminating instance [ 1590.183442] env[68040]: DEBUG nova.compute.manager [None req-40c72093-5de6-4ea1-81fd-f1034c981b6f tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Start destroying the instance on the hypervisor. 
{{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1590.183634] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-40c72093-5de6-4ea1-81fd-f1034c981b6f tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1590.183886] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4a731d35-15e9-4443-a778-8f186c65f42a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.193718] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6197cb24-913d-46dd-b7c0-43bb389c356a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.204882] env[68040]: DEBUG nova.compute.manager [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1590.228744] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-40c72093-5de6-4ea1-81fd-f1034c981b6f tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77 could not be found. [ 1590.229019] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-40c72093-5de6-4ea1-81fd-f1034c981b6f tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1590.229188] env[68040]: INFO nova.compute.manager [None req-40c72093-5de6-4ea1-81fd-f1034c981b6f tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1590.229433] env[68040]: DEBUG oslo.service.loopingcall [None req-40c72093-5de6-4ea1-81fd-f1034c981b6f tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1590.229816] env[68040]: DEBUG nova.compute.manager [-] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1590.229816] env[68040]: DEBUG nova.network.neutron [-] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1590.253027] env[68040]: DEBUG oslo_concurrency.lockutils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.253415] env[68040]: DEBUG oslo_concurrency.lockutils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.254795] env[68040]: INFO nova.compute.claims [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1590.260670] env[68040]: DEBUG nova.network.neutron [-] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1590.266882] env[68040]: INFO nova.compute.manager [-] [instance: b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77] Took 0.04 seconds to deallocate network for instance. 
[ 1590.366281] env[68040]: DEBUG oslo_concurrency.lockutils [None req-40c72093-5de6-4ea1-81fd-f1034c981b6f tempest-ServersTestManualDisk-104538757 tempest-ServersTestManualDisk-104538757-project-member] Lock "b2df1bf9-3c29-4790-a53e-fc4ffe7b5a77" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.187s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.472646] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6605fe1c-5691-4102-a445-f438d7610d92 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.480008] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30a87ad-df6a-4608-968f-611dfb4b2b68 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.510627] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dcf77c2-fe26-400e-b557-703517db0c01 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.517676] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62578fd-0fd4-4568-97ba-90c699f1771c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.530286] env[68040]: DEBUG nova.compute.provider_tree [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1590.539027] env[68040]: DEBUG nova.scheduler.client.report [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1590.551815] env[68040]: DEBUG oslo_concurrency.lockutils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.298s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.552319] env[68040]: DEBUG nova.compute.manager [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Start building networks asynchronously for instance. 
{{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1590.582217] env[68040]: DEBUG nova.compute.utils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1590.583634] env[68040]: DEBUG nova.compute.manager [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1590.583831] env[68040]: DEBUG nova.network.neutron [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1590.591378] env[68040]: DEBUG nova.compute.manager [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1590.657554] env[68040]: DEBUG nova.compute.manager [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Start spawning the instance on the hypervisor. 
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1590.683919] env[68040]: DEBUG nova.virt.hardware [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1590.684209] env[68040]: DEBUG nova.virt.hardware [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1590.684378] env[68040]: DEBUG nova.virt.hardware [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1590.684568] env[68040]: DEBUG nova.virt.hardware [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1590.684722] env[68040]: DEBUG nova.virt.hardware [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1590.684874] env[68040]: DEBUG nova.virt.hardware [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1590.685102] env[68040]: DEBUG nova.virt.hardware [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1590.685275] env[68040]: DEBUG nova.virt.hardware [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1590.685453] env[68040]: DEBUG nova.virt.hardware [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1590.685621] env[68040]: DEBUG nova.virt.hardware [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1590.685797] env[68040]: DEBUG nova.virt.hardware [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1590.686772] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3f5355-7b09-47a2-9181-6628fa2ca1b4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.694757] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6c20af-9861-4b77-8328-8b0483499509 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.852752] env[68040]: DEBUG nova.policy [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2e1160641e0248f1984bc04c0b82bbe7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9f8a7e0143e342eda476fbb96d33f33b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 1591.311848] env[68040]: DEBUG nova.network.neutron [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Successfully created port: 816d9d97-e9a8-4a65-984f-e14c67820436 {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1591.987888] env[68040]: DEBUG nova.network.neutron [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Successfully updated port: 816d9d97-e9a8-4a65-984f-e14c67820436 {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1592.002512] env[68040]: DEBUG oslo_concurrency.lockutils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Acquiring lock 
"refresh_cache-221a5bbe-7168-4f5c-ab49-8a149545655f" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1592.002672] env[68040]: DEBUG oslo_concurrency.lockutils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Acquired lock "refresh_cache-221a5bbe-7168-4f5c-ab49-8a149545655f" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1592.002861] env[68040]: DEBUG nova.network.neutron [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1592.083460] env[68040]: DEBUG nova.network.neutron [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1592.187736] env[68040]: DEBUG nova.compute.manager [req-8f28bf42-5bb4-48ef-bb02-b7b601cdc773 req-f5c13701-ea98-42b2-ac45-9fec4af34c53 service nova] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Received event network-vif-plugged-816d9d97-e9a8-4a65-984f-e14c67820436 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1592.187965] env[68040]: DEBUG oslo_concurrency.lockutils [req-8f28bf42-5bb4-48ef-bb02-b7b601cdc773 req-f5c13701-ea98-42b2-ac45-9fec4af34c53 service nova] Acquiring lock "221a5bbe-7168-4f5c-ab49-8a149545655f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1592.188193] env[68040]: DEBUG oslo_concurrency.lockutils [req-8f28bf42-5bb4-48ef-bb02-b7b601cdc773 req-f5c13701-ea98-42b2-ac45-9fec4af34c53 service nova] Lock "221a5bbe-7168-4f5c-ab49-8a149545655f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1592.188395] env[68040]: DEBUG oslo_concurrency.lockutils [req-8f28bf42-5bb4-48ef-bb02-b7b601cdc773 req-f5c13701-ea98-42b2-ac45-9fec4af34c53 service nova] Lock "221a5bbe-7168-4f5c-ab49-8a149545655f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1592.188595] env[68040]: DEBUG nova.compute.manager [req-8f28bf42-5bb4-48ef-bb02-b7b601cdc773 req-f5c13701-ea98-42b2-ac45-9fec4af34c53 service nova] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] No waiting events found dispatching network-vif-plugged-816d9d97-e9a8-4a65-984f-e14c67820436 {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1592.188769] env[68040]: WARNING nova.compute.manager [req-8f28bf42-5bb4-48ef-bb02-b7b601cdc773 req-f5c13701-ea98-42b2-ac45-9fec4af34c53 service nova] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Received unexpected event 
network-vif-plugged-816d9d97-e9a8-4a65-984f-e14c67820436 for instance with vm_state building and task_state spawning. [ 1592.188919] env[68040]: DEBUG nova.compute.manager [req-8f28bf42-5bb4-48ef-bb02-b7b601cdc773 req-f5c13701-ea98-42b2-ac45-9fec4af34c53 service nova] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Received event network-changed-816d9d97-e9a8-4a65-984f-e14c67820436 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1592.189143] env[68040]: DEBUG nova.compute.manager [req-8f28bf42-5bb4-48ef-bb02-b7b601cdc773 req-f5c13701-ea98-42b2-ac45-9fec4af34c53 service nova] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Refreshing instance network info cache due to event network-changed-816d9d97-e9a8-4a65-984f-e14c67820436. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1592.189284] env[68040]: DEBUG oslo_concurrency.lockutils [req-8f28bf42-5bb4-48ef-bb02-b7b601cdc773 req-f5c13701-ea98-42b2-ac45-9fec4af34c53 service nova] Acquiring lock "refresh_cache-221a5bbe-7168-4f5c-ab49-8a149545655f" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1592.412593] env[68040]: DEBUG nova.network.neutron [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Updating instance_info_cache with network_info: [{"id": "816d9d97-e9a8-4a65-984f-e14c67820436", "address": "fa:16:3e:ac:1c:fb", "network": {"id": "e4cf4d22-ee61-47a0-8234-24cf26dffa7d", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-855034290-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f8a7e0143e342eda476fbb96d33f33b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "744515ee-aa5b-4c23-b959-b56c51da6b86", "external-id": "nsx-vlan-transportzone-310", "segmentation_id": 310, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap816d9d97-e9", "ovs_interfaceid": "816d9d97-e9a8-4a65-984f-e14c67820436", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1592.425814] env[68040]: DEBUG oslo_concurrency.lockutils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Releasing lock "refresh_cache-221a5bbe-7168-4f5c-ab49-8a149545655f" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1592.426122] env[68040]: DEBUG nova.compute.manager [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Instance network_info: |[{"id": "816d9d97-e9a8-4a65-984f-e14c67820436", "address": 
"fa:16:3e:ac:1c:fb", "network": {"id": "e4cf4d22-ee61-47a0-8234-24cf26dffa7d", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-855034290-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f8a7e0143e342eda476fbb96d33f33b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "744515ee-aa5b-4c23-b959-b56c51da6b86", "external-id": "nsx-vlan-transportzone-310", "segmentation_id": 310, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap816d9d97-e9", "ovs_interfaceid": "816d9d97-e9a8-4a65-984f-e14c67820436", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1592.426424] env[68040]: DEBUG oslo_concurrency.lockutils [req-8f28bf42-5bb4-48ef-bb02-b7b601cdc773 req-f5c13701-ea98-42b2-ac45-9fec4af34c53 service nova] Acquired lock "refresh_cache-221a5bbe-7168-4f5c-ab49-8a149545655f" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1592.426601] env[68040]: DEBUG nova.network.neutron [req-8f28bf42-5bb4-48ef-bb02-b7b601cdc773 req-f5c13701-ea98-42b2-ac45-9fec4af34c53 service nova] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Refreshing network info cache for port 816d9d97-e9a8-4a65-984f-e14c67820436 {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1592.427651] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:1c:fb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '744515ee-aa5b-4c23-b959-b56c51da6b86', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '816d9d97-e9a8-4a65-984f-e14c67820436', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1592.435969] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Creating folder: Project (9f8a7e0143e342eda476fbb96d33f33b). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1592.439348] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7d3f7b4-cc00-4594-bbb2-0ab9e499a2b9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.449454] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Created folder: Project (9f8a7e0143e342eda476fbb96d33f33b) in parent group-v639956. 
[ 1592.449875] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Creating folder: Instances. Parent ref: group-v640046. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1592.449875] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bbe6e338-e872-443d-96e9-4c01e7dcc0e7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.458136] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Created folder: Instances in parent group-v640046. [ 1592.458362] env[68040]: DEBUG oslo.service.loopingcall [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1592.458603] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1592.458794] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd4664ec-34cd-43b3-88f6-70f29adb3f8e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.476974] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1592.476974] env[68040]: value = "task-3200304" [ 1592.476974] env[68040]: _type = "Task" [ 1592.476974] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.484017] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200304, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.710363] env[68040]: DEBUG nova.network.neutron [req-8f28bf42-5bb4-48ef-bb02-b7b601cdc773 req-f5c13701-ea98-42b2-ac45-9fec4af34c53 service nova] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Updated VIF entry in instance network info cache for port 816d9d97-e9a8-4a65-984f-e14c67820436. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1592.710723] env[68040]: DEBUG nova.network.neutron [req-8f28bf42-5bb4-48ef-bb02-b7b601cdc773 req-f5c13701-ea98-42b2-ac45-9fec4af34c53 service nova] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Updating instance_info_cache with network_info: [{"id": "816d9d97-e9a8-4a65-984f-e14c67820436", "address": "fa:16:3e:ac:1c:fb", "network": {"id": "e4cf4d22-ee61-47a0-8234-24cf26dffa7d", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-855034290-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9f8a7e0143e342eda476fbb96d33f33b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "744515ee-aa5b-4c23-b959-b56c51da6b86", "external-id": "nsx-vlan-transportzone-310", "segmentation_id": 310, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap816d9d97-e9", "ovs_interfaceid": "816d9d97-e9a8-4a65-984f-e14c67820436", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1592.719870] env[68040]: DEBUG oslo_concurrency.lockutils [req-8f28bf42-5bb4-48ef-bb02-b7b601cdc773 req-f5c13701-ea98-42b2-ac45-9fec4af34c53 service nova] Releasing lock "refresh_cache-221a5bbe-7168-4f5c-ab49-8a149545655f" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1592.987475] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200304, 'name': CreateVM_Task, 'duration_secs': 0.287968} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.987694] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1592.988291] env[68040]: DEBUG oslo_concurrency.lockutils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1592.988458] env[68040]: DEBUG oslo_concurrency.lockutils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1592.988838] env[68040]: DEBUG oslo_concurrency.lockutils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1592.989106] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-431ded29-2009-4274-a10a-b25930811fee {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.993292] env[68040]: DEBUG oslo_vmware.api [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Waiting for the task: (returnval){ [ 1592.993292] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]526b2b20-0559-c588-a4f2-66b37139e86e" [ 1592.993292] env[68040]: _type = "Task" [ 1592.993292] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.002342] env[68040]: DEBUG oslo_vmware.api [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]526b2b20-0559-c588-a4f2-66b37139e86e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.504030] env[68040]: DEBUG oslo_concurrency.lockutils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1593.504350] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1593.504350] env[68040]: DEBUG oslo_concurrency.lockutils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1595.764777] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "0210d9d4-2161-4b06-bc81-9de361accca6" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.765088] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "0210d9d4-2161-4b06-bc81-9de361accca6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1629.984229] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1630.983298] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1633.983271] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1633.995958] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1633.996131] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.996208] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.996326] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1633.997811] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4465682b-0462-41a3-9351-51f895b3025a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.006514] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878c8f36-fb21-4160-aa26-cd135657bcc2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.020374] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83978265-fa62-4640-b62f-219bd93cfd25 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.026596] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50206f62-1a7a-4e76-a9cd-796717f48a0f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.054699] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180938MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1634.054852] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1634.055057] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1634.129697] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e92b662c-b458-49d8-ac2a-00ae6046a11b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 
'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1634.129869] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 57cd94c2-aec3-427e-9b9f-a444fe291974 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1634.130009] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 87a7851e-d6fe-481a-8abb-5732e281cb64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1634.130139] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c581d685-7ea0-41f8-b911-ff1dce1b46c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1634.130261] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4a08d3e3-5e84-4f34-b418-2c18eadbef25 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1634.130383] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 268b5613-b132-49ed-a45b-bc88132177cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1634.130503] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4dfa01f8-53a0-4ee4-9b00-93017144ea0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1634.130620] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1634.130736] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1634.130851] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 221a5bbe-7168-4f5c-ab49-8a149545655f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1634.142912] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 031481de-d52f-4f3f-80e5-0d0d6803d624 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1634.153296] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 2e44ead1-4676-4d9b-bbae-5082f505fc8b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1634.163219] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f89a378a-376a-48d0-a01b-75c5bb4d8cd9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1634.172809] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0210d9d4-2161-4b06-bc81-9de361accca6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1634.173027] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1634.173176] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1634.190130] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Refreshing inventories for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1634.204899] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Updating ProviderTree inventory for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1634.205097] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Updating inventory in ProviderTree for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1634.214961] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Refreshing aggregate associations for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44, aggregates: None {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1634.233941] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Refreshing trait associations for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1634.389226] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4142c76a-781c-400c-92be-1f04429bba7f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.397948] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-04df29b6-3be3-4335-bcd2-3aa88aa93db9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.426822] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcae54e0-8b05-4118-8f05-33b64be24af4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.433340] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7851a99-1d1a-482d-b4eb-aa87231495ac {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.446101] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1634.454365] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1634.469336] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1634.469439] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.414s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.857368] env[68040]: WARNING oslo_vmware.rw_handles [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1634.857368] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1634.857368] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1634.857368] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1634.857368] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1634.857368] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 1634.857368] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1634.857368] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1634.857368] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1634.857368] 
env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1634.857368] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1634.857368] env[68040]: ERROR oslo_vmware.rw_handles [ 1634.857867] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/92186b1a-bee4-46c5-b2d6-b72aa9115dfd/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1634.859732] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1634.859968] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Copying Virtual Disk [datastore2] vmware_temp/92186b1a-bee4-46c5-b2d6-b72aa9115dfd/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/92186b1a-bee4-46c5-b2d6-b72aa9115dfd/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1634.860266] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3ec3f7ac-2fcb-483f-a84b-dcce6294e17b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.867655] env[68040]: DEBUG oslo_vmware.api [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Waiting for the task: (returnval){ [ 1634.867655] env[68040]: value = "task-3200305" [ 1634.867655] env[68040]: _type = "Task" [ 1634.867655] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.875410] env[68040]: DEBUG oslo_vmware.api [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Task: {'id': task-3200305, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.377608] env[68040]: DEBUG oslo_vmware.exceptions [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Fault InvalidArgument not matched. 
{{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1635.377870] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1635.378414] env[68040]: ERROR nova.compute.manager [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1635.378414] env[68040]: Faults: ['InvalidArgument'] [ 1635.378414] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Traceback (most recent call last): [ 1635.378414] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1635.378414] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] yield resources [ 1635.378414] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1635.378414] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] self.driver.spawn(context, instance, image_meta, [ 1635.378414] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1635.378414] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1635.378414] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1635.378414] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] self._fetch_image_if_missing(context, vi) [ 1635.378414] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1635.378762] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] image_cache(vi, tmp_image_ds_loc) [ 1635.378762] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1635.378762] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] vm_util.copy_virtual_disk( [ 1635.378762] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1635.378762] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] session._wait_for_task(vmdk_copy_task) [ 1635.378762] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1635.378762] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] return self.wait_for_task(task_ref) [ 1635.378762] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1635.378762] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] return evt.wait() [ 1635.378762] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1635.378762] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] result = hub.switch() [ 1635.378762] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1635.378762] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] return self.greenlet.switch() [ 1635.379109] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1635.379109] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] self.f(*self.args, **self.kw) [ 1635.379109] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1635.379109] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] raise exceptions.translate_fault(task_info.error) [ 1635.379109] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1635.379109] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Faults: ['InvalidArgument'] [ 1635.379109] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] [ 1635.379109] env[68040]: INFO nova.compute.manager [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Terminating instance [ 1635.380290] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1635.380500] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1635.380736] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-39ba768e-4292-41de-972c-df509f02c5d8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.383635] env[68040]: DEBUG nova.compute.manager [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1635.383804] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1635.384542] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ac33ca-cb02-41b5-ad4f-67fd83f6312b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.391183] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1635.391434] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24c32cd3-8f2c-4efd-8535-7d32d280181c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.393556] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1635.393732] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1635.394697] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8235e3e-19bb-4647-b217-a39b8c76f23e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.399330] env[68040]: DEBUG oslo_vmware.api [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Waiting for the task: (returnval){ [ 1635.399330] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52b3f348-86a2-6160-37ed-c3c61f63e86d" [ 1635.399330] env[68040]: _type = "Task" [ 1635.399330] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.407425] env[68040]: DEBUG oslo_vmware.api [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52b3f348-86a2-6160-37ed-c3c61f63e86d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.455389] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1635.455627] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1635.455796] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Deleting the datastore file [datastore2] e92b662c-b458-49d8-ac2a-00ae6046a11b {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1635.456074] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e31d0b12-af25-4a84-b945-ff2abe19bc45 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.462354] env[68040]: DEBUG oslo_vmware.api [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Waiting for the task: (returnval){ [ 1635.462354] env[68040]: value = "task-3200307" [ 1635.462354] env[68040]: _type = "Task" [ 1635.462354] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.469594] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1635.469814] env[68040]: DEBUG oslo_vmware.api [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Task: {'id': task-3200307, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.909370] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1635.909636] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Creating directory with path [datastore2] vmware_temp/95bcf0c9-6ede-47be-becd-1ec475e17ad9/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1635.909878] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0d6e81a-fb94-4e81-9bba-9fdf77ad250e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.928448] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Created directory with path [datastore2] vmware_temp/95bcf0c9-6ede-47be-becd-1ec475e17ad9/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1635.928645] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Fetch image to [datastore2] vmware_temp/95bcf0c9-6ede-47be-becd-1ec475e17ad9/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1635.928810] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/95bcf0c9-6ede-47be-becd-1ec475e17ad9/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1635.929588] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3bce00-50da-4691-b0f4-12aba20303d3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.936258] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc5589bd-f00d-4229-954b-e1701272a918 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.945101] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd282878-a1ec-4f70-8f1a-6ee6d3e111f5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.977653] env[68040]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b17ab6-8940-4878-b9fe-7853c71c927a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.984403] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1635.984651] env[68040]: DEBUG oslo_vmware.api [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Task: {'id': task-3200307, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089896} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.986122] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1635.986320] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1635.986493] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1635.986661] env[68040]: INFO nova.compute.manager [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Took 0.60 seconds to destroy the instance on the hypervisor. 
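The DeleteDatastoreFile_Task entries above show the task-polling pattern that recurs throughout this log: a vCenter task is invoked, then its 'info' property is read repeatedly until it reports success or error (the "progress is 0%" lines). A minimal sketch of that loop, assuming a get_task_info() callable standing in for the PropertyCollector read; the names here are illustrative, not the actual oslo.vmware internals:

    import time

    def wait_for_task(get_task_info, interval=0.5):
        # get_task_info() stands in for a PropertyCollector read of the
        # task's 'info' property; assumed to return an object with
        # .state, .progress, .result and .error attributes.
        while True:
            info = get_task_info()
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # mirrors the driver's
                # raise exceptions.translate_fault(task_info.error)
                raise RuntimeError(info.error)
            # the real driver re-polls via an eventlet looping call
            # rather than a blocking sleep
            time.sleep(interval)

The earlier CopyVirtualDisk_Task failure follows exactly this path: the poll loop sees state 'error' and the translated InvalidArgument fault propagates up through _wait_for_task into the spawn traceback.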
[ 1635.988652] env[68040]: DEBUG nova.compute.claims [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1635.988874] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1635.989033] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1635.991519] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d46ae602-9e1a-4931-b03b-e956fbacbc54 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.017142] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1636.070597] env[68040]: DEBUG oslo_vmware.rw_handles [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/95bcf0c9-6ede-47be-becd-1ec475e17ad9/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1636.131072] env[68040]: DEBUG oslo_vmware.rw_handles [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1636.131269] env[68040]: DEBUG oslo_vmware.rw_handles [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/95bcf0c9-6ede-47be-becd-1ec475e17ad9/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1636.245908] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3664785-d9b5-4b58-a7b5-3ed274ad1c77 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.254860] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a951db-96e4-49ce-9d19-b6014fe31cd0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.283636] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6479aef8-f5f0-4e32-a185-72ae901242ce {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.290549] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e778d2a9-7267-48b3-b6c6-e701701d7903 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.303410] env[68040]: DEBUG nova.compute.provider_tree [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1636.311760] env[68040]: DEBUG nova.scheduler.client.report [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1636.325098] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.336s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.325631] env[68040]: ERROR nova.compute.manager [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1636.325631] env[68040]: Faults: ['InvalidArgument'] [ 1636.325631] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Traceback (most recent call last): [ 1636.325631] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1636.325631] 
env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] self.driver.spawn(context, instance, image_meta, [ 1636.325631] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1636.325631] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1636.325631] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1636.325631] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] self._fetch_image_if_missing(context, vi) [ 1636.325631] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1636.325631] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] image_cache(vi, tmp_image_ds_loc) [ 1636.325631] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1636.325950] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] vm_util.copy_virtual_disk( [ 1636.325950] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1636.325950] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] session._wait_for_task(vmdk_copy_task) [ 1636.325950] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1636.325950] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] return self.wait_for_task(task_ref) [ 1636.325950] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1636.325950] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] return evt.wait() [ 1636.325950] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1636.325950] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] result = hub.switch() [ 1636.325950] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1636.325950] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] return self.greenlet.switch() [ 1636.325950] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1636.325950] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] self.f(*self.args, **self.kw) [ 1636.326540] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1636.326540] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] raise exceptions.translate_fault(task_info.error) [ 1636.326540] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1636.326540] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Faults: ['InvalidArgument'] [ 1636.326540] env[68040]: ERROR nova.compute.manager [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] [ 1636.326540] env[68040]: DEBUG nova.compute.utils [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1636.327706] env[68040]: DEBUG nova.compute.manager [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Build of instance e92b662c-b458-49d8-ac2a-00ae6046a11b was re-scheduled: A specified parameter was not correct: fileType [ 1636.327706] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1636.328095] env[68040]: DEBUG nova.compute.manager [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1636.328275] env[68040]: DEBUG nova.compute.manager [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1636.328451] env[68040]: DEBUG nova.compute.manager [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1636.328608] env[68040]: DEBUG nova.network.neutron [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1636.703587] env[68040]: DEBUG nova.network.neutron [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1636.716303] env[68040]: INFO nova.compute.manager [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Took 0.39 seconds to deallocate network for instance. [ 1636.802797] env[68040]: INFO nova.scheduler.client.report [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Deleted allocations for instance e92b662c-b458-49d8-ac2a-00ae6046a11b [ 1636.827595] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6ef52700-5994-41b7-95b1-e3f8ece68694 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Lock "e92b662c-b458-49d8-ac2a-00ae6046a11b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 573.214s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.829152] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "e92b662c-b458-49d8-ac2a-00ae6046a11b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 425.639s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.829660] env[68040]: INFO nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] During sync_power_state the instance has a pending task (spawning). Skip. 
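The paired "acquired ... waited 425.639s" / '"released" ... held 573.214s' lines above come from lock accounting wrapped around every critical section. A rough sketch of that bookkeeping, assuming a simple process-local table of named locks (illustrative only; the real oslo_concurrency.lockutils also supports fair locks and external file locks):

    import logging
    import threading
    import time

    LOG = logging.getLogger(__name__)
    _LOCKS = {}

    def synchronized(name):
        # one shared lock object per lock name, as with the
        # "e92b662c-...-events" and "compute_resources" locks above
        lock = _LOCKS.setdefault(name, threading.Lock())

        def decorator(fn):
            def inner(*args, **kwargs):
                target = fn.__qualname__
                t_wait = time.monotonic()
                LOG.debug('Acquiring lock "%s" by "%s"', name, target)
                with lock:
                    t_held = time.monotonic()
                    LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                              name, target, t_held - t_wait)
                    try:
                        return fn(*args, **kwargs)
                    finally:
                        LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                                  name, target, time.monotonic() - t_held)
            return inner
        return decorator

The long waited/held times in this log are therefore not anomalies of the lock itself: the instance-UUID lock was simply held for the entire 573-second build-and-reschedule, so the power-state sync and terminate requests queued behind it.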
[ 1636.830340] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "e92b662c-b458-49d8-ac2a-00ae6046a11b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.830544] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3c6c6b5e-efcd-4305-af71-79518916aed8 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Lock "e92b662c-b458-49d8-ac2a-00ae6046a11b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 376.713s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.830765] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3c6c6b5e-efcd-4305-af71-79518916aed8 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Acquiring lock "e92b662c-b458-49d8-ac2a-00ae6046a11b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1636.830973] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3c6c6b5e-efcd-4305-af71-79518916aed8 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Lock "e92b662c-b458-49d8-ac2a-00ae6046a11b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.831321] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3c6c6b5e-efcd-4305-af71-79518916aed8 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Lock "e92b662c-b458-49d8-ac2a-00ae6046a11b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.833246] env[68040]: INFO nova.compute.manager [None req-3c6c6b5e-efcd-4305-af71-79518916aed8 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Terminating instance [ 1636.834981] env[68040]: DEBUG nova.compute.manager [None req-3c6c6b5e-efcd-4305-af71-79518916aed8 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Start destroying the instance on the hypervisor. 
{{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1636.835203] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6c6b5e-efcd-4305-af71-79518916aed8 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1636.835457] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a85af9f8-3146-4e3f-83d9-b2e962db3717 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.845149] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d407a7-25d8-499f-8d33-a6c2c2b5f818 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.858498] env[68040]: DEBUG nova.compute.manager [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1636.879255] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-3c6c6b5e-efcd-4305-af71-79518916aed8 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e92b662c-b458-49d8-ac2a-00ae6046a11b could not be found. [ 1636.879481] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-3c6c6b5e-efcd-4305-af71-79518916aed8 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1636.879657] env[68040]: INFO nova.compute.manager [None req-3c6c6b5e-efcd-4305-af71-79518916aed8 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1636.879900] env[68040]: DEBUG oslo.service.loopingcall [None req-3c6c6b5e-efcd-4305-af71-79518916aed8 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1636.880151] env[68040]: DEBUG nova.compute.manager [-] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1636.880248] env[68040]: DEBUG nova.network.neutron [-] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1636.903331] env[68040]: DEBUG nova.network.neutron [-] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1636.906580] env[68040]: DEBUG oslo_concurrency.lockutils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1636.906811] env[68040]: DEBUG oslo_concurrency.lockutils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.908271] env[68040]: INFO nova.compute.claims [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1636.911529] env[68040]: INFO nova.compute.manager [-] [instance: e92b662c-b458-49d8-ac2a-00ae6046a11b] Took 0.03 seconds to deallocate network for instance. [ 1636.985464] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1636.985630] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1636.985753] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1637.000755] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3c6c6b5e-efcd-4305-af71-79518916aed8 tempest-ServerAddressesTestJSON-1810601680 tempest-ServerAddressesTestJSON-1810601680-project-member] Lock "e92b662c-b458-49d8-ac2a-00ae6046a11b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.170s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.005720] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Skipping network cache update for instance because it is Building. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1637.005802] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1637.005911] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1637.005991] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1637.006137] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1637.006264] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1637.006383] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1637.006506] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1637.006641] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1637.006737] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1637.006859] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1637.007310] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1637.115391] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d2c2f2-8eb7-40e0-be5b-f8c6faa83ccd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.122634] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe1f9e2-3125-44af-9274-21a459edca50 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.152676] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db8d851-5316-4ad4-b86b-09d641e2d8c6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.159979] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b603a6b8-156e-4fe7-b17b-748ffa351a4f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.174508] env[68040]: DEBUG nova.compute.provider_tree [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1637.183231] env[68040]: DEBUG nova.scheduler.client.report [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1637.199851] env[68040]: DEBUG oslo_concurrency.lockutils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.293s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.200309] env[68040]: DEBUG nova.compute.manager [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Start building networks asynchronously for instance. 
{{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1637.236333] env[68040]: DEBUG nova.compute.utils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1637.237506] env[68040]: DEBUG nova.compute.manager [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1637.237680] env[68040]: DEBUG nova.network.neutron [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1637.246533] env[68040]: DEBUG nova.compute.manager [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1637.304707] env[68040]: DEBUG nova.policy [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c8e66b0d9ada4cabbb8efd2e8340a3a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52dbf578e94a4db7af130703ad4eb741', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 1637.309345] env[68040]: DEBUG nova.compute.manager [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Start spawning the instance on the hypervisor. 
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1637.336026] env[68040]: DEBUG nova.virt.hardware [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1637.336026] env[68040]: DEBUG nova.virt.hardware [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1637.336222] env[68040]: DEBUG nova.virt.hardware [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1637.336321] env[68040]: DEBUG nova.virt.hardware [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1637.336471] env[68040]: DEBUG nova.virt.hardware [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1637.336621] env[68040]: DEBUG nova.virt.hardware [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1637.336831] env[68040]: DEBUG nova.virt.hardware [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1637.336991] env[68040]: DEBUG nova.virt.hardware [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1637.337213] env[68040]: DEBUG nova.virt.hardware [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 
tempest-ServersTestJSON-1941559996-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1637.337401] env[68040]: DEBUG nova.virt.hardware [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1637.337581] env[68040]: DEBUG nova.virt.hardware [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1637.338436] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0ef7c7d-6e47-406f-a68d-e8466029c737 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.346419] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3566a23f-554a-43a0-94b5-620032faa17f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.697343] env[68040]: DEBUG nova.network.neutron [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Successfully created port: c3433824-9b89-4990-9751-5e5170359ecd {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1637.984170] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1637.984435] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1638.439892] env[68040]: DEBUG nova.network.neutron [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Successfully updated port: c3433824-9b89-4990-9751-5e5170359ecd {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1638.454429] env[68040]: DEBUG oslo_concurrency.lockutils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "refresh_cache-031481de-d52f-4f3f-80e5-0d0d6803d624" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1638.454586] env[68040]: DEBUG oslo_concurrency.lockutils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquired lock "refresh_cache-031481de-d52f-4f3f-80e5-0d0d6803d624" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1638.454736] env[68040]: DEBUG nova.network.neutron [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1638.511993] env[68040]: DEBUG nova.network.neutron [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Instance cache missing network info. 
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1638.697870] env[68040]: DEBUG nova.network.neutron [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Updating instance_info_cache with network_info: [{"id": "c3433824-9b89-4990-9751-5e5170359ecd", "address": "fa:16:3e:91:b8:de", "network": {"id": "9839fe48-68c5-4649-bd83-6b4d9c6008e8", "bridge": "br-int", "label": "tempest-ServersTestJSON-1965746643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dbf578e94a4db7af130703ad4eb741", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3433824-9b", "ovs_interfaceid": "c3433824-9b89-4990-9751-5e5170359ecd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1638.710110] env[68040]: DEBUG oslo_concurrency.lockutils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Releasing lock "refresh_cache-031481de-d52f-4f3f-80e5-0d0d6803d624" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1638.710230] env[68040]: DEBUG nova.compute.manager [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Instance network_info: |[{"id": "c3433824-9b89-4990-9751-5e5170359ecd", "address": "fa:16:3e:91:b8:de", "network": {"id": "9839fe48-68c5-4649-bd83-6b4d9c6008e8", "bridge": "br-int", "label": "tempest-ServersTestJSON-1965746643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dbf578e94a4db7af130703ad4eb741", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3433824-9b", "ovs_interfaceid": "c3433824-9b89-4990-9751-5e5170359ecd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1638.710669] 
env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:b8:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4b033f4d-2e92-4702-add6-410a29d3f251', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c3433824-9b89-4990-9751-5e5170359ecd', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1638.718239] env[68040]: DEBUG oslo.service.loopingcall [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1638.718715] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1638.718947] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b2e31a19-ebf0-45ca-8356-cfa3e1d05323 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.739434] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1638.739434] env[68040]: value = "task-3200308" [ 1638.739434] env[68040]: _type = "Task" [ 1638.739434] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.744472] env[68040]: DEBUG nova.compute.manager [req-9d79075c-90e3-4359-ae38-5908276c80c1 req-d8010caa-956f-41e0-9227-cebad3af6b11 service nova] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Received event network-vif-plugged-c3433824-9b89-4990-9751-5e5170359ecd {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1638.744677] env[68040]: DEBUG oslo_concurrency.lockutils [req-9d79075c-90e3-4359-ae38-5908276c80c1 req-d8010caa-956f-41e0-9227-cebad3af6b11 service nova] Acquiring lock "031481de-d52f-4f3f-80e5-0d0d6803d624-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.744878] env[68040]: DEBUG oslo_concurrency.lockutils [req-9d79075c-90e3-4359-ae38-5908276c80c1 req-d8010caa-956f-41e0-9227-cebad3af6b11 service nova] Lock "031481de-d52f-4f3f-80e5-0d0d6803d624-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1638.745062] env[68040]: DEBUG oslo_concurrency.lockutils [req-9d79075c-90e3-4359-ae38-5908276c80c1 req-d8010caa-956f-41e0-9227-cebad3af6b11 service nova] Lock "031481de-d52f-4f3f-80e5-0d0d6803d624-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.745232] env[68040]: DEBUG nova.compute.manager [req-9d79075c-90e3-4359-ae38-5908276c80c1 req-d8010caa-956f-41e0-9227-cebad3af6b11 service nova] [instance: 
031481de-d52f-4f3f-80e5-0d0d6803d624] No waiting events found dispatching network-vif-plugged-c3433824-9b89-4990-9751-5e5170359ecd {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1638.745396] env[68040]: WARNING nova.compute.manager [req-9d79075c-90e3-4359-ae38-5908276c80c1 req-d8010caa-956f-41e0-9227-cebad3af6b11 service nova] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Received unexpected event network-vif-plugged-c3433824-9b89-4990-9751-5e5170359ecd for instance with vm_state building and task_state spawning. [ 1638.745556] env[68040]: DEBUG nova.compute.manager [req-9d79075c-90e3-4359-ae38-5908276c80c1 req-d8010caa-956f-41e0-9227-cebad3af6b11 service nova] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Received event network-changed-c3433824-9b89-4990-9751-5e5170359ecd {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1638.745709] env[68040]: DEBUG nova.compute.manager [req-9d79075c-90e3-4359-ae38-5908276c80c1 req-d8010caa-956f-41e0-9227-cebad3af6b11 service nova] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Refreshing instance network info cache due to event network-changed-c3433824-9b89-4990-9751-5e5170359ecd. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1638.745888] env[68040]: DEBUG oslo_concurrency.lockutils [req-9d79075c-90e3-4359-ae38-5908276c80c1 req-d8010caa-956f-41e0-9227-cebad3af6b11 service nova] Acquiring lock "refresh_cache-031481de-d52f-4f3f-80e5-0d0d6803d624" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1638.746034] env[68040]: DEBUG oslo_concurrency.lockutils [req-9d79075c-90e3-4359-ae38-5908276c80c1 req-d8010caa-956f-41e0-9227-cebad3af6b11 service nova] Acquired lock "refresh_cache-031481de-d52f-4f3f-80e5-0d0d6803d624" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1638.746218] env[68040]: DEBUG nova.network.neutron [req-9d79075c-90e3-4359-ae38-5908276c80c1 req-d8010caa-956f-41e0-9227-cebad3af6b11 service nova] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Refreshing network info cache for port c3433824-9b89-4990-9751-5e5170359ecd {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1638.752157] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200308, 'name': CreateVM_Task} progress is 5%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.250713] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200308, 'name': CreateVM_Task, 'duration_secs': 0.305759} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.251108] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1639.251523] env[68040]: DEBUG oslo_concurrency.lockutils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1639.251694] env[68040]: DEBUG oslo_concurrency.lockutils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1639.252025] env[68040]: DEBUG oslo_concurrency.lockutils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1639.252269] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fca4bf4-5a9b-4658-a904-79cca61aa3d4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.256596] env[68040]: DEBUG oslo_vmware.api [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for the task: (returnval){ [ 1639.256596] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52a7fa7f-3587-04f5-1625-78baa7f3baa4" [ 1639.256596] env[68040]: _type = "Task" [ 1639.256596] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.263872] env[68040]: DEBUG oslo_vmware.api [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52a7fa7f-3587-04f5-1625-78baa7f3baa4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.269903] env[68040]: DEBUG nova.network.neutron [req-9d79075c-90e3-4359-ae38-5908276c80c1 req-d8010caa-956f-41e0-9227-cebad3af6b11 service nova] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Updated VIF entry in instance network info cache for port c3433824-9b89-4990-9751-5e5170359ecd. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1639.270233] env[68040]: DEBUG nova.network.neutron [req-9d79075c-90e3-4359-ae38-5908276c80c1 req-d8010caa-956f-41e0-9227-cebad3af6b11 service nova] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Updating instance_info_cache with network_info: [{"id": "c3433824-9b89-4990-9751-5e5170359ecd", "address": "fa:16:3e:91:b8:de", "network": {"id": "9839fe48-68c5-4649-bd83-6b4d9c6008e8", "bridge": "br-int", "label": "tempest-ServersTestJSON-1965746643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dbf578e94a4db7af130703ad4eb741", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3433824-9b", "ovs_interfaceid": "c3433824-9b89-4990-9751-5e5170359ecd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1639.280294] env[68040]: DEBUG oslo_concurrency.lockutils [req-9d79075c-90e3-4359-ae38-5908276c80c1 req-d8010caa-956f-41e0-9227-cebad3af6b11 service nova] Releasing lock "refresh_cache-031481de-d52f-4f3f-80e5-0d0d6803d624" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1639.767441] env[68040]: DEBUG oslo_concurrency.lockutils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1639.767654] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1639.767895] env[68040]: DEBUG oslo_concurrency.lockutils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1639.979692] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1649.492437] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3f6d5799-130b-400f-a44b-e36b3755cdae 
tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Acquiring lock "c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1662.328998] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b394bb2b-6266-4e45-8399-7ff3e17e4e9d tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Acquiring lock "221a5bbe-7168-4f5c-ab49-8a149545655f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.616846] env[68040]: DEBUG oslo_concurrency.lockutils [None req-eef1839f-ce2d-4fc0-8bf3-d02dc3f75e83 tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquiring lock "268b5613-b132-49ed-a45b-bc88132177cf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.658020] env[68040]: WARNING oslo_vmware.rw_handles [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1683.658020] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1683.658020] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1683.658020] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1683.658020] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1683.658020] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 1683.658020] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1683.658020] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1683.658020] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1683.658020] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1683.658020] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1683.658020] env[68040]: ERROR oslo_vmware.rw_handles [ 1683.658697] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/95bcf0c9-6ede-47be-becd-1ec475e17ad9/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1683.660417] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] 
[instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1683.660664] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Copying Virtual Disk [datastore2] vmware_temp/95bcf0c9-6ede-47be-becd-1ec475e17ad9/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/95bcf0c9-6ede-47be-becd-1ec475e17ad9/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1683.660942] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba77b57d-1656-436b-ac40-ae0d7f495fc9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.668959] env[68040]: DEBUG oslo_vmware.api [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Waiting for the task: (returnval){ [ 1683.668959] env[68040]: value = "task-3200309" [ 1683.668959] env[68040]: _type = "Task" [ 1683.668959] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.676455] env[68040]: DEBUG oslo_vmware.api [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Task: {'id': task-3200309, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.179214] env[68040]: DEBUG oslo_vmware.exceptions [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Fault InvalidArgument not matched. 
{{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1684.179485] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1684.180037] env[68040]: ERROR nova.compute.manager [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1684.180037] env[68040]: Faults: ['InvalidArgument'] [ 1684.180037] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Traceback (most recent call last): [ 1684.180037] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1684.180037] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] yield resources [ 1684.180037] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1684.180037] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self.driver.spawn(context, instance, image_meta, [ 1684.180037] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1684.180037] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1684.180037] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1684.180037] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self._fetch_image_if_missing(context, vi) [ 1684.180037] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1684.180456] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] image_cache(vi, tmp_image_ds_loc) [ 1684.180456] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1684.180456] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] vm_util.copy_virtual_disk( [ 1684.180456] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1684.180456] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] session._wait_for_task(vmdk_copy_task) [ 1684.180456] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1684.180456] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] return self.wait_for_task(task_ref) [ 1684.180456] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1684.180456] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] return evt.wait() [ 1684.180456] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1684.180456] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] result = hub.switch() [ 1684.180456] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1684.180456] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] return self.greenlet.switch() [ 1684.180814] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1684.180814] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self.f(*self.args, **self.kw) [ 1684.180814] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1684.180814] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] raise exceptions.translate_fault(task_info.error) [ 1684.180814] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1684.180814] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Faults: ['InvalidArgument'] [ 1684.180814] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] [ 1684.180814] env[68040]: INFO nova.compute.manager [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Terminating instance [ 1684.181875] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.182089] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1684.182319] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-92ea2ccf-88d3-4c5e-9df6-ea5ee9525628 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.184970] env[68040]: DEBUG nova.compute.manager [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1684.185175] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1684.185877] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a21fa834-db3a-4574-8629-c6896206af36 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.192810] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1684.193019] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd588f2f-ea6e-43d5-aca2-10f6bc6fa9ad {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.195180] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1684.195358] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1684.196279] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57d9faf9-1626-48a0-9d16-bfde638d76ed {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.200819] env[68040]: DEBUG oslo_vmware.api [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Waiting for the task: (returnval){ [ 1684.200819] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52e66cf4-da9d-fd99-82b7-2b17ff315293" [ 1684.200819] env[68040]: _type = "Task" [ 1684.200819] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.207924] env[68040]: DEBUG oslo_vmware.api [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52e66cf4-da9d-fd99-82b7-2b17ff315293, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.282487] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1684.282703] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1684.282872] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Deleting the datastore file [datastore2] 57cd94c2-aec3-427e-9b9f-a444fe291974 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1684.283161] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a495dd9-9b0c-4005-bafc-8e2b6f42f980 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.289203] env[68040]: DEBUG oslo_vmware.api [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Waiting for the task: (returnval){ [ 1684.289203] env[68040]: value = "task-3200311" [ 1684.289203] env[68040]: _type = "Task" [ 1684.289203] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.296653] env[68040]: DEBUG oslo_vmware.api [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Task: {'id': task-3200311, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.711589] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1684.711966] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Creating directory with path [datastore2] vmware_temp/00131ba7-40d9-405a-b8c0-325b69c93d46/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1684.712150] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4523cd4e-cf10-46ad-8d2f-e795baedaf06 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.723177] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Created directory with path [datastore2] vmware_temp/00131ba7-40d9-405a-b8c0-325b69c93d46/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1684.723364] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Fetch image to [datastore2] vmware_temp/00131ba7-40d9-405a-b8c0-325b69c93d46/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1684.723537] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/00131ba7-40d9-405a-b8c0-325b69c93d46/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1684.724338] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1982d107-a261-4178-8a16-19f36fae3f58 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.730442] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c9669e-3c9a-448a-a09a-ea54181e106e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.739139] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6783bc6-327c-410d-be99-f47de377339d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.769380] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d7d77254-dea5-4775-992c-7922508de9d2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.774788] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-314fbb38-608a-46d4-9f32-b759357ef8af {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.793756] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1684.802009] env[68040]: DEBUG oslo_vmware.api [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Task: {'id': task-3200311, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064129} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.802009] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1684.802009] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1684.802009] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1684.802009] env[68040]: INFO nova.compute.manager [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Took 0.62 seconds to destroy the instance on the hypervisor. 
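[editor's note] The wait_for_task / _poll_task pairs above (task-3200308 CreateVM_Task, task-3200309 CopyVirtualDisk_Task, task-3200311 DeleteDatastoreFile_Task) all record the same shape: submit a vCenter task, poll its progress ("progress is 0%" ... "progress is 5%"), and either report 'duration_secs' on success or raise the translated fault. Below is a minimal, self-contained sketch of that polling loop in plain Python. It is not oslo.vmware's actual implementation; FakeTask, poll, and TaskFailed are invented stand-ins for illustration only.

import time

class TaskFailed(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException (illustrative)."""

class FakeTask:
    """Illustrative stub for a vCenter task handle -- not the real API."""
    def __init__(self, ticks_until_done, fail=False):
        self._ticks = ticks_until_done
        self._fail = fail
        self.progress = 0

    def poll(self):
        # Each poll advances the fake task, mimicking the
        # "progress is N%" ... "completed successfully" lines above.
        self._ticks -= 1
        self.progress = max(0, 100 - self._ticks * 25)
        if self._ticks <= 0:
            if self._fail:
                # Mirrors: raise exceptions.translate_fault(task_info.error)
                raise TaskFailed(
                    "A specified parameter was not correct: fileType")
            return "success"
        return "running"

def wait_for_task(task, interval=0.5):
    """Poll until the task finishes; raise on fault, like _poll_task."""
    start = time.monotonic()
    while True:
        state = task.poll()           # a real log line would show progress here
        if state == "success":
            return time.monotonic() - start   # the 'duration_secs' in the log
        time.sleep(interval)

if __name__ == "__main__":
    print("took %.3fs" % wait_for_task(FakeTask(3), interval=0.01))
    try:
        wait_for_task(FakeTask(2, fail=True), interval=0.01)
    except TaskFailed as exc:
        print("fault:", exc)

The same loop explains why the CopyVirtualDisk_Task failure above surfaces only after the "progress is 0%" poll: the fault is attached to the task object server-side and is raised by the poller, not by the original submit call.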
[ 1684.803902] env[68040]: DEBUG nova.compute.claims [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1684.804089] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1684.804306] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.955105] env[68040]: DEBUG oslo_vmware.rw_handles [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/00131ba7-40d9-405a-b8c0-325b69c93d46/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1685.016559] env[68040]: DEBUG oslo_vmware.rw_handles [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1685.016763] env[68040]: DEBUG oslo_vmware.rw_handles [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/00131ba7-40d9-405a-b8c0-325b69c93d46/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1685.075927] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e7ab68-990e-40c2-83ee-917cc105a521 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.083491] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2907e20c-5765-4af3-89d1-f5794297efe3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.113184] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4bf8ec6-2373-4d9e-b4eb-a6378da6fc3f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.120264] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5882ae82-d73a-43f4-bfec-637ed3a44576 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.133118] env[68040]: DEBUG nova.compute.provider_tree [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1685.141470] env[68040]: DEBUG nova.scheduler.client.report [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1685.160899] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.355s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.160899] env[68040]: ERROR nova.compute.manager [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1685.160899] env[68040]: Faults: ['InvalidArgument'] [ 1685.160899] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Traceback (most recent call last): [ 1685.160899] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in 
_build_and_run_instance [ 1685.160899] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self.driver.spawn(context, instance, image_meta, [ 1685.160899] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1685.160899] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1685.160899] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1685.160899] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self._fetch_image_if_missing(context, vi) [ 1685.161183] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1685.161183] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] image_cache(vi, tmp_image_ds_loc) [ 1685.161183] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1685.161183] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] vm_util.copy_virtual_disk( [ 1685.161183] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1685.161183] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] session._wait_for_task(vmdk_copy_task) [ 1685.161183] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1685.161183] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] return self.wait_for_task(task_ref) [ 1685.161183] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1685.161183] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] return evt.wait() [ 1685.161183] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1685.161183] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] result = hub.switch() [ 1685.161183] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1685.161497] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] return self.greenlet.switch() [ 1685.161497] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1685.161497] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self.f(*self.args, **self.kw) [ 1685.161497] env[68040]: ERROR nova.compute.manager [instance: 
57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1685.161497] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] raise exceptions.translate_fault(task_info.error) [ 1685.161497] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1685.161497] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Faults: ['InvalidArgument'] [ 1685.161497] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] [ 1685.161497] env[68040]: DEBUG nova.compute.utils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1685.162330] env[68040]: DEBUG nova.compute.manager [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Build of instance 57cd94c2-aec3-427e-9b9f-a444fe291974 was re-scheduled: A specified parameter was not correct: fileType [ 1685.162330] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1685.163330] env[68040]: DEBUG nova.compute.manager [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1685.163330] env[68040]: DEBUG nova.compute.manager [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1685.163330] env[68040]: DEBUG nova.compute.manager [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1685.163479] env[68040]: DEBUG nova.network.neutron [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1685.283955] env[68040]: DEBUG neutronclient.v2_0.client [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68040) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1685.285094] env[68040]: ERROR nova.compute.manager [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1685.285094] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Traceback (most recent call last): [ 1685.285094] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1685.285094] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self.driver.spawn(context, instance, image_meta, [ 1685.285094] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1685.285094] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1685.285094] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1685.285094] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self._fetch_image_if_missing(context, vi) [ 1685.285094] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1685.285094] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] image_cache(vi, tmp_image_ds_loc) [ 1685.285094] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1685.285094] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] vm_util.copy_virtual_disk( [ 1685.285474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1685.285474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] session._wait_for_task(vmdk_copy_task) [ 1685.285474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1685.285474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] return self.wait_for_task(task_ref) [ 1685.285474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1685.285474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] return evt.wait() [ 1685.285474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1685.285474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] result = hub.switch() [ 1685.285474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1685.285474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] return self.greenlet.switch() [ 1685.285474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1685.285474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self.f(*self.args, **self.kw) [ 1685.285474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1685.285843] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] raise exceptions.translate_fault(task_info.error) [ 1685.285843] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1685.285843] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Faults: ['InvalidArgument'] [ 1685.285843] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] [ 1685.285843] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] During handling of the above exception, another exception occurred: [ 1685.285843] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] [ 1685.285843] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Traceback (most recent call last): [ 1685.285843] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/compute/manager.py", line 2447, in _do_build_and_run_instance [ 1685.285843] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self._build_and_run_instance(context, instance, image, [ 1685.285843] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File 
"/opt/stack/nova/nova/compute/manager.py", line 2739, in _build_and_run_instance [ 1685.285843] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] raise exception.RescheduledException( [ 1685.285843] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] nova.exception.RescheduledException: Build of instance 57cd94c2-aec3-427e-9b9f-a444fe291974 was re-scheduled: A specified parameter was not correct: fileType [ 1685.285843] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Faults: ['InvalidArgument'] [ 1685.285843] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] [ 1685.286269] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] During handling of the above exception, another exception occurred: [ 1685.286269] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] [ 1685.286269] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Traceback (most recent call last): [ 1685.286269] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.286269] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] ret = obj(*args, **kwargs) [ 1685.286269] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1685.286269] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] exception_handler_v20(status_code, error_body) [ 1685.286269] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1685.286269] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] raise client_exc(message=error_message, [ 1685.286269] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1685.286269] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Neutron server returns request_ids: ['req-d68f4f6d-515b-46cc-b5ae-1747a1156f84'] [ 1685.286269] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] [ 1685.286269] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] During handling of the above exception, another exception occurred: [ 1685.286650] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] [ 1685.286650] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Traceback (most recent call last): [ 1685.286650] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/compute/manager.py", line 3036, in _cleanup_allocated_networks [ 1685.286650] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self._deallocate_network(context, instance, requested_networks) [ 1685.286650] env[68040]: ERROR nova.compute.manager 
[instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1685.286650] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self.network_api.deallocate_for_instance( [ 1685.286650] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1685.286650] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] data = neutron.list_ports(**search_opts) [ 1685.286650] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.286650] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] ret = obj(*args, **kwargs) [ 1685.286650] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1685.286650] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] return self.list('ports', self.ports_path, retrieve_all, [ 1685.286650] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.287044] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] ret = obj(*args, **kwargs) [ 1685.287044] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1685.287044] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] for r in self._pagination(collection, path, **params): [ 1685.287044] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1685.287044] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] res = self.get(path, params=params) [ 1685.287044] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.287044] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] ret = obj(*args, **kwargs) [ 1685.287044] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1685.287044] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] return self.retry_request("GET", action, body=body, [ 1685.287044] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.287044] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] ret = obj(*args, **kwargs) [ 1685.287044] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1685.287044] env[68040]: ERROR nova.compute.manager [instance: 
57cd94c2-aec3-427e-9b9f-a444fe291974] return self.do_request(method, action, body=body, [ 1685.287444] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.287444] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] ret = obj(*args, **kwargs) [ 1685.287444] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1685.287444] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self._handle_fault_response(status_code, replybody, resp) [ 1685.287444] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1685.287444] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] raise exception.Unauthorized() [ 1685.287444] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] nova.exception.Unauthorized: Not authorized. [ 1685.287444] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] [ 1685.343158] env[68040]: INFO nova.scheduler.client.report [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Deleted allocations for instance 57cd94c2-aec3-427e-9b9f-a444fe291974 [ 1685.361948] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ae9303c-347d-41fa-8918-ad8caaf4b7d4 tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Lock "57cd94c2-aec3-427e-9b9f-a444fe291974" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 614.860s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.363062] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24a890f8-ebec-46a5-8009-8a91cda7c5bf tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Lock "57cd94c2-aec3-427e-9b9f-a444fe291974" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 418.865s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1685.363286] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24a890f8-ebec-46a5-8009-8a91cda7c5bf tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Acquiring lock "57cd94c2-aec3-427e-9b9f-a444fe291974-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.363497] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24a890f8-ebec-46a5-8009-8a91cda7c5bf tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Lock "57cd94c2-aec3-427e-9b9f-a444fe291974-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1685.363665]
env[68040]: DEBUG oslo_concurrency.lockutils [None req-24a890f8-ebec-46a5-8009-8a91cda7c5bf tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Lock "57cd94c2-aec3-427e-9b9f-a444fe291974-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.365862] env[68040]: INFO nova.compute.manager [None req-24a890f8-ebec-46a5-8009-8a91cda7c5bf tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Terminating instance [ 1685.367569] env[68040]: DEBUG nova.compute.manager [None req-24a890f8-ebec-46a5-8009-8a91cda7c5bf tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1685.367836] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24a890f8-ebec-46a5-8009-8a91cda7c5bf tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1685.368452] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b8870340-0c2d-4481-9bc2-8ef07629dbf5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.375729] env[68040]: DEBUG nova.compute.manager [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1685.383289] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63810d1e-5300-40bd-98f5-0957a9ad9e7f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.412667] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-24a890f8-ebec-46a5-8009-8a91cda7c5bf tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 57cd94c2-aec3-427e-9b9f-a444fe291974 could not be found. [ 1685.412922] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24a890f8-ebec-46a5-8009-8a91cda7c5bf tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1685.413125] env[68040]: INFO nova.compute.manager [None req-24a890f8-ebec-46a5-8009-8a91cda7c5bf tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Took 0.05 seconds to destroy the instance on the hypervisor.
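Note: the WARNING just above shows the destroy path tolerating a VM that is already gone from the backend: InstanceNotFound is caught and the instance is reported destroyed so the rest of the delete flow can continue. A minimal sketch of that pattern, assuming a hypothetical backend API (this is not Nova's actual vmops code):

class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""


def destroy(backend, instance_uuid):
    """Destroy a VM, treating "already missing" as success."""
    try:
        vm_ref = backend.find_by_uuid(instance_uuid)  # hypothetical lookup
        backend.delete_vm(vm_ref)                     # hypothetical delete
    except InstanceNotFound:
        # Nothing exists on the hypervisor; swallow the error so network
        # deallocation and allocation cleanup can still run afterwards.
        print(f"Instance {instance_uuid} does not exist on backend")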
[ 1685.413379] env[68040]: DEBUG oslo.service.loopingcall [None req-24a890f8-ebec-46a5-8009-8a91cda7c5bf tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1685.417705] env[68040]: DEBUG nova.compute.manager [-] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1685.417811] env[68040]: DEBUG nova.network.neutron [-] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1685.431970] env[68040]: DEBUG oslo_concurrency.lockutils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.432217] env[68040]: DEBUG oslo_concurrency.lockutils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1685.433649] env[68040]: INFO nova.compute.claims [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1685.504310] env[68040]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68040) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1685.504541] env[68040]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1685.505046] env[68040]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
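Note: the "neutron.py, line 196, in wrapper" frames repeating through these tracebacks come from an exception-translation layer: a 401 from neutronclient is re-raised as nova.exception.Unauthorized when a user token was rejected (line 204 in the earlier traceback), but as NeutronAdminCredentialConfigurationInvalid when the admin client's own token is refused (line 212 in the traceback that follows), which is why the log asks to verify the Neutron credentials in nova.conf. A hedged sketch of that idea, with stand-in exception types rather than the real Nova/neutronclient classes:

import functools


class ClientUnauthorized(Exception):
    """Stand-in for neutronclient.common.exceptions.Unauthorized."""


class Unauthorized(Exception):
    """Stand-in for nova.exception.Unauthorized."""


class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Stand-in for the Nova exception of the same name."""


def translate_neutron_exceptions(func, is_admin_client):
    """Wrap a Neutron client call, mapping 401s to Nova exceptions."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except ClientUnauthorized:
            if is_admin_client:
                # The admin token itself was rejected: almost certainly a
                # configuration problem, so raise the config-specific error.
                raise NeutronAdminCredentialConfigurationInvalid()
            raise Unauthorized()
    return wrapper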
[ 1685.505046] env[68040]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1685.505046] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.505046] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1685.505046] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1685.505046] env[68040]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1685.505046] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1685.505046] env[68040]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1685.505046] env[68040]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1685.505046] env[68040]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-5c30681f-f140-4226-b31d-05dc29c2c23a'] [ 1685.505046] env[68040]: ERROR oslo.service.loopingcall [ 1685.505046] env[68040]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1685.505046] env[68040]: ERROR oslo.service.loopingcall [ 1685.505046] env[68040]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1685.505046] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1685.505046] env[68040]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1685.505519] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1685.505519] env[68040]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1685.505519] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1685.505519] env[68040]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1685.505519] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1685.505519] env[68040]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1685.505519] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1685.505519] env[68040]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1685.505519] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.505519] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1685.505519] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1685.505519] env[68040]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1685.505519] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.505519] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1685.505519] env[68040]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1685.505519] env[68040]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1685.505519] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1685.505519] env[68040]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1685.506032] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.506032] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1685.506032] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1685.506032] env[68040]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1685.506032] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.506032] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1685.506032] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1685.506032] env[68040]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1685.506032] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.506032] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1685.506032] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1685.506032] env[68040]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1685.506032] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1685.506032] env[68040]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1685.506032] env[68040]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1685.506032] env[68040]: ERROR oslo.service.loopingcall [ 1685.506462] env[68040]: ERROR nova.compute.manager [None req-24a890f8-ebec-46a5-8009-8a91cda7c5bf tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1685.533474] env[68040]: ERROR nova.compute.manager [None req-24a890f8-ebec-46a5-8009-8a91cda7c5bf tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
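Note: _deallocate_network_with_retries runs the deallocation inside an oslo.service dynamic-interval looping call, which is why the failure above surfaces through loopingcall frames before Nova gives up and marks the instance as ERROR. A much-simplified retry loop in the same spirit (the attempt count, interval, and fatal-error set below are illustrative assumptions, not oslo.service's implementation):

import time


class FatalError(Exception):
    """Stand-in for errors retrying cannot fix, e.g. rejected credentials."""


def call_with_retries(func, attempts=3, interval=1.0):
    """Call func(), retrying transient failures a bounded number of times."""
    for attempt in range(1, attempts + 1):
        try:
            return func()
        except FatalError:
            raise  # a bad admin token will not heal on retry
        except Exception:
            if attempt == attempts:
                raise
            time.sleep(interval)  # real code grows the interval between tries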
[ 1685.533474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Traceback (most recent call last): [ 1685.533474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.533474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] ret = obj(*args, **kwargs) [ 1685.533474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1685.533474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] exception_handler_v20(status_code, error_body) [ 1685.533474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1685.533474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] raise client_exc(message=error_message, [ 1685.533474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1685.533474] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Neutron server returns request_ids: ['req-5c30681f-f140-4226-b31d-05dc29c2c23a'] [ 1685.533852] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] [ 1685.533852] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] During handling of the above exception, another exception occurred: [ 1685.533852] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] [ 1685.533852] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Traceback (most recent call last): [ 1685.533852] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1685.533852] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self._delete_instance(context, instance, bdms) [ 1685.533852] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1685.533852] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self._shutdown_instance(context, instance, bdms) [ 1685.533852] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1685.533852] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self._try_deallocate_network(context, instance, requested_networks) [ 1685.533852] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1685.533852] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] with excutils.save_and_reraise_exception(): [ 1685.533852] env[68040]: ERROR 
nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1685.533852] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self.force_reraise() [ 1685.534284] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1685.534284] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] raise self.value [ 1685.534284] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1685.534284] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] _deallocate_network_with_retries() [ 1685.534284] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1685.534284] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] return evt.wait() [ 1685.534284] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1685.534284] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] result = hub.switch() [ 1685.534284] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1685.534284] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] return self.greenlet.switch() [ 1685.534284] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1685.534284] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] result = func(*self.args, **self.kw) [ 1685.534626] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1685.534626] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] result = f(*args, **kwargs) [ 1685.534626] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1685.534626] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self._deallocate_network( [ 1685.534626] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1685.534626] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self.network_api.deallocate_for_instance( [ 1685.534626] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1685.534626] env[68040]: ERROR nova.compute.manager [instance: 
57cd94c2-aec3-427e-9b9f-a444fe291974] data = neutron.list_ports(**search_opts) [ 1685.534626] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.534626] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] ret = obj(*args, **kwargs) [ 1685.534626] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1685.534626] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] return self.list('ports', self.ports_path, retrieve_all, [ 1685.534626] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.535015] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] ret = obj(*args, **kwargs) [ 1685.535015] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1685.535015] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] for r in self._pagination(collection, path, **params): [ 1685.535015] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1685.535015] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] res = self.get(path, params=params) [ 1685.535015] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.535015] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] ret = obj(*args, **kwargs) [ 1685.535015] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1685.535015] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] return self.retry_request("GET", action, body=body, [ 1685.535015] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.535015] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] ret = obj(*args, **kwargs) [ 1685.535015] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1685.535015] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] return self.do_request(method, action, body=body, [ 1685.535387] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.535387] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] ret = obj(*args, **kwargs) [ 1685.535387] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1685.535387] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] self._handle_fault_response(status_code, replybody, resp) [ 1685.535387] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1685.535387] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1685.535387] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1685.535387] env[68040]: ERROR nova.compute.manager [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] [ 1685.559612] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24a890f8-ebec-46a5-8009-8a91cda7c5bf tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Lock "57cd94c2-aec3-427e-9b9f-a444fe291974" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.197s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.610668] env[68040]: INFO nova.compute.manager [None req-24a890f8-ebec-46a5-8009-8a91cda7c5bf tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] [instance: 57cd94c2-aec3-427e-9b9f-a444fe291974] Successfully reverted task state from None on failure for instance. [ 1685.614543] env[68040]: ERROR oslo_messaging.rpc.server [None req-24a890f8-ebec-46a5-8009-8a91cda7c5bf tempest-ServersAdminNegativeTestJSON-1786082038 tempest-ServersAdminNegativeTestJSON-1786082038-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1685.614543] env[68040]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1685.614543] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.614543] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1685.614543] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1685.614543] env[68040]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1685.614543] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1685.614543] env[68040]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1685.614543] env[68040]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1685.614543] env[68040]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-5c30681f-f140-4226-b31d-05dc29c2c23a'] [ 1685.614543] env[68040]: ERROR oslo_messaging.rpc.server [ 1685.614543] env[68040]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1685.614543] env[68040]: ERROR oslo_messaging.rpc.server [ 1685.614543] env[68040]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1685.614543] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1685.615131] env[68040]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1685.615131] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1685.615131] env[68040]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1685.615131] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1685.615131] env[68040]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1685.615131] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1685.615131] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1685.615131] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1685.615131] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1685.615131] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1685.615131] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1685.615131] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1685.615131] env[68040]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1685.615131] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1685.615131] env[68040]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1685.615131] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1685.615131] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1685.615131] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1685.615642] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1685.615642] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1685.615642] env[68040]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1685.615642] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1685.615642] env[68040]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1685.615642] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1685.615642] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1685.615642] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1685.615642] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1685.615642] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1685.615642] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1685.615642] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1685.615642] env[68040]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1685.615642] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1685.615642] env[68040]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1685.615642] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1685.615642] env[68040]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1685.615642] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1685.616171] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1685.616171] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1685.616171] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1685.616171] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1685.616171] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1685.616171] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1685.616171] env[68040]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1685.616171] env[68040]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1685.616171] env[68040]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1685.616171] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1685.616171] env[68040]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1685.616171] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1685.616171] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1685.616171] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1685.616171] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1685.616171] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1685.616171] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1685.616171] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1685.616648] env[68040]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1685.616648] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1685.616648] env[68040]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1685.616648] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1685.616648] env[68040]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1685.616648] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1685.616648] env[68040]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1685.616648] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1685.616648] env[68040]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1685.616648] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1685.616648] env[68040]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1685.616648] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1685.616648] env[68040]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1685.616648] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1685.616648] env[68040]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1685.616648] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1685.616648] env[68040]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1685.616648] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.617146] env[68040]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1685.617146] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1685.617146] env[68040]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1685.617146] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.617146] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1685.617146] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1685.617146] env[68040]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1685.617146] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1685.617146] env[68040]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1685.617146] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.617146] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1685.617146] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1685.617146] env[68040]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1685.617146] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.617146] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1685.617146] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1685.617146] env[68040]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1685.617146] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1685.617652] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1685.617652] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1685.617652] env[68040]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1685.617652] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1685.617652] env[68040]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1685.617652] env[68040]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
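Note: a few records below, the report client logs the provider inventory it compared against Placement. The usable capacity per resource class is (total - reserved) * allocation_ratio; checking that arithmetic against the values logged for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44:

inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0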
[ 1685.617652] env[68040]: ERROR oslo_messaging.rpc.server [ 1685.638140] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a9d3686-9b59-44bb-84d9-c3a6fd2e2470 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.645727] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6434159-a794-4af1-85a9-846a452712e5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.676431] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aafa9b4-1244-4437-a325-6753a4ee30ee {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.683239] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8755c0-6e45-42b8-a917-d4f8bcf80b70 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.695803] env[68040]: DEBUG nova.compute.provider_tree [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1685.704096] env[68040]: DEBUG nova.scheduler.client.report [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1685.716922] env[68040]: DEBUG oslo_concurrency.lockutils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.285s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.717377] env[68040]: DEBUG nova.compute.manager [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Start building networks asynchronously for instance. 
{{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1685.746171] env[68040]: DEBUG nova.compute.utils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1685.747272] env[68040]: DEBUG nova.compute.manager [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1685.747439] env[68040]: DEBUG nova.network.neutron [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1685.756525] env[68040]: DEBUG nova.compute.manager [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1685.811637] env[68040]: DEBUG nova.policy [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4fdca25678784d958a467db4ecfc929a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '784658a8924c403e88fbd9cc3ff787a0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 1685.818027] env[68040]: DEBUG nova.compute.manager [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Start spawning the instance on the hypervisor. 
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1685.843585] env[68040]: DEBUG nova.virt.hardware [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1685.843834] env[68040]: DEBUG nova.virt.hardware [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1685.843992] env[68040]: DEBUG nova.virt.hardware [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1685.844199] env[68040]: DEBUG nova.virt.hardware [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1685.844348] env[68040]: DEBUG nova.virt.hardware [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1685.844496] env[68040]: DEBUG nova.virt.hardware [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1685.844701] env[68040]: DEBUG nova.virt.hardware [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1685.844865] env[68040]: DEBUG nova.virt.hardware [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1685.845048] env[68040]: DEBUG nova.virt.hardware [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Got 1 
possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1685.845226] env[68040]: DEBUG nova.virt.hardware [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1685.845406] env[68040]: DEBUG nova.virt.hardware [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1685.846282] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba5f371-4ccf-4477-8f98-cbc48d43eef3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.854381] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5969f6b3-752b-4740-8944-df9585ff11ac {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.193404] env[68040]: DEBUG nova.network.neutron [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Successfully created port: 7d8e1b99-f653-4d42-9ad8-bff0c011a339 {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1686.942598] env[68040]: DEBUG nova.compute.manager [req-6d772319-56e8-4aad-a327-a1de21d0251e req-e52830e1-5ece-428f-9d39-74c5ecdae726 service nova] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Received event network-vif-plugged-7d8e1b99-f653-4d42-9ad8-bff0c011a339 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1686.942884] env[68040]: DEBUG oslo_concurrency.lockutils [req-6d772319-56e8-4aad-a327-a1de21d0251e req-e52830e1-5ece-428f-9d39-74c5ecdae726 service nova] Acquiring lock "2e44ead1-4676-4d9b-bbae-5082f505fc8b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1686.943081] env[68040]: DEBUG oslo_concurrency.lockutils [req-6d772319-56e8-4aad-a327-a1de21d0251e req-e52830e1-5ece-428f-9d39-74c5ecdae726 service nova] Lock "2e44ead1-4676-4d9b-bbae-5082f505fc8b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1686.943452] env[68040]: DEBUG oslo_concurrency.lockutils [req-6d772319-56e8-4aad-a327-a1de21d0251e req-e52830e1-5ece-428f-9d39-74c5ecdae726 service nova] Lock "2e44ead1-4676-4d9b-bbae-5082f505fc8b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1686.943452] env[68040]: DEBUG nova.compute.manager [req-6d772319-56e8-4aad-a327-a1de21d0251e req-e52830e1-5ece-428f-9d39-74c5ecdae726 service nova] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] No waiting events found dispatching
network-vif-plugged-7d8e1b99-f653-4d42-9ad8-bff0c011a339 {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1686.943580] env[68040]: WARNING nova.compute.manager [req-6d772319-56e8-4aad-a327-a1de21d0251e req-e52830e1-5ece-428f-9d39-74c5ecdae726 service nova] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Received unexpected event network-vif-plugged-7d8e1b99-f653-4d42-9ad8-bff0c011a339 for instance with vm_state building and task_state spawning. [ 1687.022291] env[68040]: DEBUG nova.network.neutron [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Successfully updated port: 7d8e1b99-f653-4d42-9ad8-bff0c011a339 {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1687.035794] env[68040]: DEBUG oslo_concurrency.lockutils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquiring lock "refresh_cache-2e44ead1-4676-4d9b-bbae-5082f505fc8b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1687.035902] env[68040]: DEBUG oslo_concurrency.lockutils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquired lock "refresh_cache-2e44ead1-4676-4d9b-bbae-5082f505fc8b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1687.036070] env[68040]: DEBUG nova.network.neutron [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1687.076581] env[68040]: DEBUG nova.network.neutron [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Instance cache missing network info. 
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1687.237554] env[68040]: DEBUG nova.network.neutron [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Updating instance_info_cache with network_info: [{"id": "7d8e1b99-f653-4d42-9ad8-bff0c011a339", "address": "fa:16:3e:af:84:d1", "network": {"id": "0977b41e-8a7d-4917-a780-9c1eb7453e4c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1372225524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "784658a8924c403e88fbd9cc3ff787a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d8e1b99-f6", "ovs_interfaceid": "7d8e1b99-f653-4d42-9ad8-bff0c011a339", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1687.253022] env[68040]: DEBUG oslo_concurrency.lockutils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Releasing lock "refresh_cache-2e44ead1-4676-4d9b-bbae-5082f505fc8b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.253022] env[68040]: DEBUG nova.compute.manager [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Instance network_info: |[{"id": "7d8e1b99-f653-4d42-9ad8-bff0c011a339", "address": "fa:16:3e:af:84:d1", "network": {"id": "0977b41e-8a7d-4917-a780-9c1eb7453e4c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1372225524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "784658a8924c403e88fbd9cc3ff787a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d8e1b99-f6", "ovs_interfaceid": "7d8e1b99-f653-4d42-9ad8-bff0c011a339", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1687.253468] env[68040]: DEBUG 
nova.virt.vmwareapi.vmops [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:84:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '680cb499-2a47-482b-af0d-112016ac0e17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7d8e1b99-f653-4d42-9ad8-bff0c011a339', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1687.258829] env[68040]: DEBUG oslo.service.loopingcall [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1687.259326] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1687.259549] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-50db3438-b853-41b2-b6b3-edf491789dcb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.280070] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1687.280070] env[68040]: value = "task-3200312" [ 1687.280070] env[68040]: _type = "Task" [ 1687.280070] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.289244] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200312, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
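The CreateVM_Task records above and below show the oslo.vmware wait_for_task pattern: invoke a vSphere API method, receive a Task managed object back, then poll it until it reaches a terminal state. A minimal sketch of that loop, written against pyVmomi rather than Nova's internal session helpers (the function name and poll interval are illustrative, not Nova's code):

```python
# Minimal sketch of the poll loop behind the wait_for_task/_poll_task
# records in this log. Assumes a pyVmomi Task object, e.g. the result of
# folder.CreateVM_Task(...).
import time

from pyVmomi import vim


def wait_for_task(task, poll_interval=0.5):
    """Block until the task leaves the queued/running states."""
    while task.info.state in (vim.TaskInfo.State.queued,
                              vim.TaskInfo.State.running):
        # task.info.progress is what the "progress is 0%" records report.
        time.sleep(poll_interval)
    if task.info.state == vim.TaskInfo.State.error:
        # Terminal failure: raise the vim fault, as happens later in this
        # section when CopyVirtualDisk_Task fails with InvalidArgument.
        raise task.info.error
    return task.info.result
```

The CreateVM_Task here completes in about 0.29s; the disk-copy task later in the section takes the error path instead.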
[ 1687.790405] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200312, 'name': CreateVM_Task, 'duration_secs': 0.291414} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.790548] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1687.791222] env[68040]: DEBUG oslo_concurrency.lockutils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1687.791389] env[68040]: DEBUG oslo_concurrency.lockutils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1687.791726] env[68040]: DEBUG oslo_concurrency.lockutils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1687.791974] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a479fa60-0f22-48f7-b8a8-437a16112ea1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.796244] env[68040]: DEBUG oslo_vmware.api [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Waiting for the task: (returnval){ [ 1687.796244] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5286ac71-03f3-6900-21b5-f2d0508ccc5f" [ 1687.796244] env[68040]: _type = "Task" [ 1687.796244] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.803319] env[68040]: DEBUG oslo_vmware.api [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5286ac71-03f3-6900-21b5-f2d0508ccc5f, 'name': SearchDatastore_Task} progress is 0%.
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.306974] env[68040]: DEBUG oslo_concurrency.lockutils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1688.307482] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1688.307610] env[68040]: DEBUG oslo_concurrency.lockutils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1688.969509] env[68040]: DEBUG nova.compute.manager [req-1a5a34ed-958f-4711-92eb-0efe18106472 req-3c6ed4e1-644c-4eed-9f1b-e5ae7e64e544 service nova] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Received event network-changed-7d8e1b99-f653-4d42-9ad8-bff0c011a339 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1688.969749] env[68040]: DEBUG nova.compute.manager [req-1a5a34ed-958f-4711-92eb-0efe18106472 req-3c6ed4e1-644c-4eed-9f1b-e5ae7e64e544 service nova] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Refreshing instance network info cache due to event network-changed-7d8e1b99-f653-4d42-9ad8-bff0c011a339. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1688.969982] env[68040]: DEBUG oslo_concurrency.lockutils [req-1a5a34ed-958f-4711-92eb-0efe18106472 req-3c6ed4e1-644c-4eed-9f1b-e5ae7e64e544 service nova] Acquiring lock "refresh_cache-2e44ead1-4676-4d9b-bbae-5082f505fc8b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1688.970141] env[68040]: DEBUG oslo_concurrency.lockutils [req-1a5a34ed-958f-4711-92eb-0efe18106472 req-3c6ed4e1-644c-4eed-9f1b-e5ae7e64e544 service nova] Acquired lock "refresh_cache-2e44ead1-4676-4d9b-bbae-5082f505fc8b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1688.970316] env[68040]: DEBUG nova.network.neutron [req-1a5a34ed-958f-4711-92eb-0efe18106472 req-3c6ed4e1-644c-4eed-9f1b-e5ae7e64e544 service nova] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Refreshing network info cache for port 7d8e1b99-f653-4d42-9ad8-bff0c011a339 {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1689.396780] env[68040]: DEBUG nova.network.neutron [req-1a5a34ed-958f-4711-92eb-0efe18106472 req-3c6ed4e1-644c-4eed-9f1b-e5ae7e64e544 service nova] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Updated VIF entry in instance network info cache for port 7d8e1b99-f653-4d42-9ad8-bff0c011a339. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1689.397179] env[68040]: DEBUG nova.network.neutron [req-1a5a34ed-958f-4711-92eb-0efe18106472 req-3c6ed4e1-644c-4eed-9f1b-e5ae7e64e544 service nova] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Updating instance_info_cache with network_info: [{"id": "7d8e1b99-f653-4d42-9ad8-bff0c011a339", "address": "fa:16:3e:af:84:d1", "network": {"id": "0977b41e-8a7d-4917-a780-9c1eb7453e4c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1372225524-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "784658a8924c403e88fbd9cc3ff787a0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "680cb499-2a47-482b-af0d-112016ac0e17", "external-id": "nsx-vlan-transportzone-644", "segmentation_id": 644, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d8e1b99-f6", "ovs_interfaceid": "7d8e1b99-f653-4d42-9ad8-bff0c011a339", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1689.407535] env[68040]: DEBUG oslo_concurrency.lockutils [req-1a5a34ed-958f-4711-92eb-0efe18106472 req-3c6ed4e1-644c-4eed-9f1b-e5ae7e64e544 service nova] Releasing lock "refresh_cache-2e44ead1-4676-4d9b-bbae-5082f505fc8b" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1689.983674] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1691.207683] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6815c47a-688c-443e-baec-05b40836ccf9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "031481de-d52f-4f3f-80e5-0d0d6803d624" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1692.983862] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1695.984263] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1695.984555] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1695.984691] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1695.996471] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1695.996679] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.996853] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.997023] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1695.998126] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1de825c-4629-40a5-bd35-b78ba1797065 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.007293] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1023f44c-6ef8-4d5c-b19d-f74579ae43cc {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.023021] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4de442c-20a7-4657-82bc-08b6e14dacac {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.029422] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9bcecb6-4ba2-4193-817c-a7bb692c1b61 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.057675] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181000MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1696.057831] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
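The update_available_resource audit that follows builds a hypervisor resource view from the PropertyCollector results and reconciles it against placement. The inventory payload it reports further down for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 has the shape sketched below; this just assembles that structure from the totals visible in this log and is not the resource tracker's actual code:

```python
# Sketch: the placement inventory payload this periodic task reports below.
# Totals, reserved values and allocation ratios are the ones logged for
# provider 22db6f73-b3da-436a-bf40-9c8c240b2e44; nothing is queried live.

def build_inventory(total_vcpus, total_ram_mb, total_disk_gb,
                    max_unit_vcpu, max_unit_ram_mb, max_unit_disk_gb):
    return {
        'VCPU': {'total': total_vcpus, 'reserved': 0, 'min_unit': 1,
                 'max_unit': max_unit_vcpu, 'step_size': 1,
                 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': total_ram_mb, 'reserved': 512, 'min_unit': 1,
                      'max_unit': max_unit_ram_mb, 'step_size': 1,
                      'allocation_ratio': 1.0},
        'DISK_GB': {'total': total_disk_gb, 'reserved': 0, 'min_unit': 1,
                    'max_unit': max_unit_disk_gb, 'step_size': 1,
                    'allocation_ratio': 1.0},
    }


# Matches the "Inventory has not changed" record later in this section.
inventory = build_inventory(48, 196590, 400, 16, 65530, 125)
```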
[ 1696.058038] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.128293] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 87a7851e-d6fe-481a-8abb-5732e281cb64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1696.128461] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c581d685-7ea0-41f8-b911-ff1dce1b46c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1696.128591] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4a08d3e3-5e84-4f34-b418-2c18eadbef25 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1696.128717] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 268b5613-b132-49ed-a45b-bc88132177cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1696.128840] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4dfa01f8-53a0-4ee4-9b00-93017144ea0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1696.128962] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1696.129095] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1696.129218] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 221a5bbe-7168-4f5c-ab49-8a149545655f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1696.129337] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 031481de-d52f-4f3f-80e5-0d0d6803d624 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1696.129457] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 2e44ead1-4676-4d9b-bbae-5082f505fc8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1696.140499] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f89a378a-376a-48d0-a01b-75c5bb4d8cd9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1696.150344] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0210d9d4-2161-4b06-bc81-9de361accca6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1696.150571] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1696.150707] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1696.290213] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07c8f72-07b0-42f2-9ba0-04a0c196b692 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.297724] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3a3a35-9b04-4d0e-b3d8-6a198685eb00 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.328156] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abad81e5-9649-4ff0-91de-4ea5713ce06e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.335293] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4f4e00-74e7-41d4-9061-905012f1cd67 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.347745] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1696.356434] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1696.370373] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1696.370558] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.313s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1697.370727] env[68040]: DEBUG oslo_service.periodic_task [None 
req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1697.371164] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1697.371164] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1697.390682] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1697.390833] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1697.390964] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1697.391106] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1697.391235] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1697.391359] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1697.391480] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1697.391599] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1697.391719] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Skipping network cache update for instance because it is Building. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1697.391867] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1697.391997] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1698.983697] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1699.984025] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1699.984334] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1701.678019] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e1fee5c4-78a7-4d1f-b4c8-ea66baa8d6ba tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquiring lock "2e44ead1-4676-4d9b-bbae-5082f505fc8b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1701.980039] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1705.979212] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1720.192537] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Acquiring lock "8f9a6934-9ded-4561-8d83-aacd4d79f29a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.192891] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Lock "8f9a6934-9ded-4561-8d83-aacd4d79f29a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.295446] env[68040]: WARNING oslo_vmware.rw_handles 
[None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1734.295446] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1734.295446] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1734.295446] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1734.295446] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1734.295446] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 1734.295446] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1734.295446] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1734.295446] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1734.295446] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1734.295446] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1734.295446] env[68040]: ERROR oslo_vmware.rw_handles [ 1734.296239] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/00131ba7-40d9-405a-b8c0-325b69c93d46/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1734.297956] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1734.298225] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Copying Virtual Disk [datastore2] vmware_temp/00131ba7-40d9-405a-b8c0-325b69c93d46/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/00131ba7-40d9-405a-b8c0-325b69c93d46/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1734.298519] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-829a0e93-bfcb-43c2-a9c5-b2ab9a0d1303 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.308143] env[68040]: DEBUG oslo_vmware.api [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Waiting for the task: (returnval){ [ 1734.308143] env[68040]: value = "task-3200313" [ 1734.308143] env[68040]: _type = "Task" [ 1734.308143] env[68040]: } to 
complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.316437] env[68040]: DEBUG oslo_vmware.api [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Task: {'id': task-3200313, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.818696] env[68040]: DEBUG oslo_vmware.exceptions [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1734.818970] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.819538] env[68040]: ERROR nova.compute.manager [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1734.819538] env[68040]: Faults: ['InvalidArgument'] [ 1734.819538] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Traceback (most recent call last): [ 1734.819538] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1734.819538] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] yield resources [ 1734.819538] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1734.819538] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] self.driver.spawn(context, instance, image_meta, [ 1734.819538] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1734.819538] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1734.819538] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1734.819538] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] self._fetch_image_if_missing(context, vi) [ 1734.819538] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1734.819957] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] image_cache(vi, tmp_image_ds_loc) [ 1734.819957] env[68040]: ERROR nova.compute.manager [instance: 
87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1734.819957] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] vm_util.copy_virtual_disk( [ 1734.819957] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1734.819957] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] session._wait_for_task(vmdk_copy_task) [ 1734.819957] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1734.819957] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] return self.wait_for_task(task_ref) [ 1734.819957] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1734.819957] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] return evt.wait() [ 1734.819957] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1734.819957] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] result = hub.switch() [ 1734.819957] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1734.819957] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] return self.greenlet.switch() [ 1734.820396] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1734.820396] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] self.f(*self.args, **self.kw) [ 1734.820396] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1734.820396] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] raise exceptions.translate_fault(task_info.error) [ 1734.820396] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1734.820396] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Faults: ['InvalidArgument'] [ 1734.820396] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] [ 1734.820396] env[68040]: INFO nova.compute.manager [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Terminating instance [ 1734.821438] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1734.821663] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1734.821898] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f2ff745-7505-4801-8230-a4b8b98b85e2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.826425] env[68040]: DEBUG nova.compute.manager [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1734.826425] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1734.827098] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b53d43e-5e58-4887-bb3b-8011777b50a9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.830597] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1734.831241] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1734.831739] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-539b7efa-f1d3-4d6f-a8ad-ae7ff0e3a815 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.835760] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1734.836181] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7884191f-3094-49ab-b7f9-70aa47435572 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.838401] env[68040]: DEBUG oslo_vmware.api [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Waiting for the task: (returnval){ [ 1734.838401] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52e77daf-8542-0e5c-0687-ed028e87fb6c" [ 1734.838401] env[68040]: _type = "Task" [ 1734.838401] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.845798] env[68040]: DEBUG oslo_vmware.api [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52e77daf-8542-0e5c-0687-ed028e87fb6c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.901799] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1734.901992] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1734.902181] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Deleting the datastore file [datastore2] 87a7851e-d6fe-481a-8abb-5732e281cb64 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1734.902434] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5b79e454-1bb5-4c0b-807c-0d44e6f1a000 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.908225] env[68040]: DEBUG oslo_vmware.api [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Waiting for the task: (returnval){ [ 1734.908225] env[68040]: value = "task-3200315" [ 1734.908225] env[68040]: _type = "Task" [ 1734.908225] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.915696] env[68040]: DEBUG oslo_vmware.api [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Task: {'id': task-3200315, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.349556] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1735.349953] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Creating directory with path [datastore2] vmware_temp/ba54d36e-e14d-43da-beeb-a757240f76b3/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1735.350120] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a12d7d68-2374-4dab-8c57-49626fa61f29 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.361026] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Created directory with path [datastore2] vmware_temp/ba54d36e-e14d-43da-beeb-a757240f76b3/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1735.361243] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Fetch image to [datastore2] vmware_temp/ba54d36e-e14d-43da-beeb-a757240f76b3/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1735.361449] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/ba54d36e-e14d-43da-beeb-a757240f76b3/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1735.362224] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a2bd48-c93a-4285-9b7b-569bac63696c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.368824] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2019207-4890-4f96-b2dd-f644cde04ed7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.377548] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fbeba87-6e58-457c-9a28-e0611c06d933 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.406494] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b2ef14-5861-46da-bef6-258df0ffff84 {{(pid=68040) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.417618] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-43f508fd-0c7d-42ce-be39-76656af92f75 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.419243] env[68040]: DEBUG oslo_vmware.api [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Task: {'id': task-3200315, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076259} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.419476] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1735.419651] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1735.419823] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1735.420036] env[68040]: INFO nova.compute.manager [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Took 0.59 seconds to destroy the instance on the hypervisor. 
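The DeleteDatastoreFile_Task exchange above follows oslo.vmware's generic task-polling pattern: the driver invokes a vSphere *_Task method through the session, then blocks in wait_for_task(), which re-reads the task's info object (the "progress is 0%" entries) until the task reaches 'success' or a fault is translated into an exception. A minimal sketch of the same pattern, assuming placeholder vCenter credentials and a placeholder datacenter moref rather than anything taken from this log:

# Sketch only: the endpoint, credentials and moref below are hypothetical.
from oslo_vmware import api
from oslo_vmware import vim_util

session = api.VMwareAPISession(
    'vc.example.test', 'admin', 'secret',  # placeholder vCenter endpoint
    10,    # api_retry_count
    0.5)   # task_poll_interval: how often task.info is re-read

# FileManager.DeleteDatastoreFile_Task, as invoked in the entries above.
file_manager = session.vim.service_content.fileManager
datacenter = vim_util.get_moref('datacenter-2', 'Datacenter')  # placeholder
task = session.invoke_api(
    session.vim, 'DeleteDatastoreFile_Task', file_manager,
    name='[datastore2] 87a7851e-d6fe-481a-8abb-5732e281cb64',
    datacenter=datacenter)

# wait_for_task() drives the "Task: {'id': task-..., ...} progress is 0%"
# polling seen above; on a task error it raises a VimFaultException, the
# same exception type that surfaces later in this log.
session.wait_for_task(task)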
[ 1735.424915] env[68040]: DEBUG nova.compute.claims [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1735.425103] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.425319] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.445809] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1735.500041] env[68040]: DEBUG oslo_vmware.rw_handles [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ba54d36e-e14d-43da-beeb-a757240f76b3/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1735.565988] env[68040]: DEBUG oslo_vmware.rw_handles [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1735.565988] env[68040]: DEBUG oslo_vmware.rw_handles [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ba54d36e-e14d-43da-beeb-a757240f76b3/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1735.681032] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a44ab8e9-e9c2-48fc-9020-e37656416af6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.687946] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58578429-1fe2-476c-ae58-db60623c4ce0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.717206] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d72104a-95f9-42c7-a7fb-f97079c5f773 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.723878] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a8bbbb2-10fb-40dd-b651-c6063b750918 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.736833] env[68040]: DEBUG nova.compute.provider_tree [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1735.746036] env[68040]: DEBUG nova.scheduler.client.report [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1735.760088] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.335s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.760626] env[68040]: ERROR nova.compute.manager [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1735.760626] env[68040]: Faults: ['InvalidArgument'] [ 1735.760626] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Traceback (most recent call last): [ 1735.760626] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1735.760626] env[68040]: 
ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] self.driver.spawn(context, instance, image_meta, [ 1735.760626] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1735.760626] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1735.760626] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1735.760626] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] self._fetch_image_if_missing(context, vi) [ 1735.760626] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1735.760626] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] image_cache(vi, tmp_image_ds_loc) [ 1735.760626] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1735.761020] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] vm_util.copy_virtual_disk( [ 1735.761020] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1735.761020] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] session._wait_for_task(vmdk_copy_task) [ 1735.761020] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1735.761020] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] return self.wait_for_task(task_ref) [ 1735.761020] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1735.761020] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] return evt.wait() [ 1735.761020] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1735.761020] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] result = hub.switch() [ 1735.761020] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1735.761020] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] return self.greenlet.switch() [ 1735.761020] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1735.761020] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] self.f(*self.args, **self.kw) [ 1735.761373] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1735.761373] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] raise exceptions.translate_fault(task_info.error) [ 1735.761373] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1735.761373] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Faults: ['InvalidArgument'] [ 1735.761373] env[68040]: ERROR nova.compute.manager [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] [ 1735.761373] env[68040]: DEBUG nova.compute.utils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1735.762804] env[68040]: DEBUG nova.compute.manager [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Build of instance 87a7851e-d6fe-481a-8abb-5732e281cb64 was re-scheduled: A specified parameter was not correct: fileType [ 1735.762804] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1735.763270] env[68040]: DEBUG nova.compute.manager [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1735.763456] env[68040]: DEBUG nova.compute.manager [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1735.763631] env[68040]: DEBUG nova.compute.manager [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1735.763811] env[68040]: DEBUG nova.network.neutron [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1736.107203] env[68040]: DEBUG nova.network.neutron [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1736.119776] env[68040]: INFO nova.compute.manager [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Took 0.36 seconds to deallocate network for instance. [ 1736.217029] env[68040]: INFO nova.scheduler.client.report [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Deleted allocations for instance 87a7851e-d6fe-481a-8abb-5732e281cb64 [ 1736.238984] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e7dd286c-fbac-40a4-b4d7-742d4dcf6a8f tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Lock "87a7851e-d6fe-481a-8abb-5732e281cb64" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 664.584s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.240148] env[68040]: DEBUG oslo_concurrency.lockutils [None req-34745a56-f48b-48e5-ad4e-cebcb2d024c9 tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Lock "87a7851e-d6fe-481a-8abb-5732e281cb64" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 468.825s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.240388] env[68040]: DEBUG oslo_concurrency.lockutils [None req-34745a56-f48b-48e5-ad4e-cebcb2d024c9 tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Acquiring lock "87a7851e-d6fe-481a-8abb-5732e281cb64-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.240942] env[68040]: DEBUG oslo_concurrency.lockutils [None req-34745a56-f48b-48e5-ad4e-cebcb2d024c9 tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Lock "87a7851e-d6fe-481a-8abb-5732e281cb64-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: 
waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.240942] env[68040]: DEBUG oslo_concurrency.lockutils [None req-34745a56-f48b-48e5-ad4e-cebcb2d024c9 tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Lock "87a7851e-d6fe-481a-8abb-5732e281cb64-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.243438] env[68040]: INFO nova.compute.manager [None req-34745a56-f48b-48e5-ad4e-cebcb2d024c9 tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Terminating instance [ 1736.245209] env[68040]: DEBUG nova.compute.manager [None req-34745a56-f48b-48e5-ad4e-cebcb2d024c9 tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1736.245407] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-34745a56-f48b-48e5-ad4e-cebcb2d024c9 tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1736.245868] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-74d6c31e-775a-47bc-ab07-254dca053e53 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.255151] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04dfc763-3fdd-4e5f-9e5a-56056f2bb308 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.266235] env[68040]: DEBUG nova.compute.manager [None req-1ab7bc51-6f8f-43ee-a690-498424423e90 tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: f89a378a-376a-48d0-a01b-75c5bb4d8cd9] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1736.287203] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-34745a56-f48b-48e5-ad4e-cebcb2d024c9 tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 87a7851e-d6fe-481a-8abb-5732e281cb64 could not be found. [ 1736.287431] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-34745a56-f48b-48e5-ad4e-cebcb2d024c9 tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1736.287613] env[68040]: INFO nova.compute.manager [None req-34745a56-f48b-48e5-ad4e-cebcb2d024c9 tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Took 0.04 seconds to destroy the instance on the hypervisor. 
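The WARNING just above shows the destroy path being deliberately idempotent: the failed build already unregistered the VM and deleted its datastore files, so when do_terminate_instance finally runs, the backend lookup raises InstanceNotFound, which the destroy path logs and swallows so the API-level delete still succeeds. A condensed sketch of that guard, with find_vm_ref() and unregister_and_delete() as hypothetical stand-ins for the actual lookup and teardown helpers:

# Sketch of the idempotent-destroy guard implied by the WARNING above.
# find_vm_ref() and unregister_and_delete() are hypothetical stand-ins.
import logging

from nova import exception

LOG = logging.getLogger(__name__)


def destroy(instance):
    try:
        vm_ref = find_vm_ref(instance.uuid)  # SearchIndex.FindAllByUuid above
        unregister_and_delete(vm_ref)        # unregister VM, delete its files
    except exception.InstanceNotFound:
        # The VM is already gone (here: removed by the failed build), so the
        # delete is treated as a no-op instead of failing terminate_instance.
        LOG.warning('Instance does not exist on backend: %s', instance.uuid)
    LOG.debug('Instance destroyed')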
[ 1736.287854] env[68040]: DEBUG oslo.service.loopingcall [None req-34745a56-f48b-48e5-ad4e-cebcb2d024c9 tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1736.288096] env[68040]: DEBUG nova.compute.manager [-] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1736.288197] env[68040]: DEBUG nova.network.neutron [-] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1736.295812] env[68040]: DEBUG nova.compute.manager [None req-1ab7bc51-6f8f-43ee-a690-498424423e90 tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: f89a378a-376a-48d0-a01b-75c5bb4d8cd9] Instance disappeared before build. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2430}} [ 1736.313016] env[68040]: DEBUG nova.network.neutron [-] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1736.317025] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ab7bc51-6f8f-43ee-a690-498424423e90 tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Lock "f89a378a-376a-48d0-a01b-75c5bb4d8cd9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 213.016s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.320306] env[68040]: INFO nova.compute.manager [-] [instance: 87a7851e-d6fe-481a-8abb-5732e281cb64] Took 0.03 seconds to deallocate network for instance. [ 1736.324822] env[68040]: DEBUG nova.compute.manager [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Starting instance... 
{{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1736.369099] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.369363] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.370814] env[68040]: INFO nova.compute.claims [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1736.406766] env[68040]: DEBUG oslo_concurrency.lockutils [None req-34745a56-f48b-48e5-ad4e-cebcb2d024c9 tempest-ServerActionsTestOtherB-250579351 tempest-ServerActionsTestOtherB-250579351-project-member] Lock "87a7851e-d6fe-481a-8abb-5732e281cb64" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.167s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.536525] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-960d25bf-9135-4141-a655-bd81a75f68ed {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.543730] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fedda35c-296f-4917-a80c-4c7e70163505 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.574867] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e0654e-fa31-4a9d-9772-0aa43410003f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.581513] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3778b257-eaaf-41a1-af7d-39a163de8da2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.596198] env[68040]: DEBUG nova.compute.provider_tree [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1736.605099] env[68040]: DEBUG nova.scheduler.client.report [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1736.620159] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.251s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.620619] env[68040]: DEBUG nova.compute.manager [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1736.652676] env[68040]: DEBUG nova.compute.utils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1736.654130] env[68040]: DEBUG nova.compute.manager [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1736.654304] env[68040]: DEBUG nova.network.neutron [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1736.663022] env[68040]: DEBUG nova.compute.manager [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1736.724673] env[68040]: DEBUG nova.compute.manager [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Start spawning the instance on the hypervisor. 
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1736.749211] env[68040]: DEBUG nova.virt.hardware [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1736.749476] env[68040]: DEBUG nova.virt.hardware [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1736.749639] env[68040]: DEBUG nova.virt.hardware [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1736.749817] env[68040]: DEBUG nova.virt.hardware [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1736.749967] env[68040]: DEBUG nova.virt.hardware [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1736.750188] env[68040]: DEBUG nova.virt.hardware [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1736.750372] env[68040]: DEBUG nova.virt.hardware [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1736.750535] env[68040]: DEBUG nova.virt.hardware [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1736.750703] env[68040]: DEBUG nova.virt.hardware [None 
req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1736.750868] env[68040]: DEBUG nova.virt.hardware [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1736.751054] env[68040]: DEBUG nova.virt.hardware [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1736.751903] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd6200f-907e-401e-b425-43ce82cf8fbb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.755735] env[68040]: DEBUG nova.policy [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd5b784bb2384457e9bcc4e9ff02ea850', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9a2c3ee9bf1c40228a089e4b0e5bff00', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 1736.762604] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0a851e-6014-48f2-93d2-d9740c6f8250 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.220325] env[68040]: DEBUG nova.network.neutron [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Successfully created port: 54384cee-a218-4fe5-93e6-ae4e84ce4756 {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1737.782610] env[68040]: DEBUG nova.network.neutron [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Successfully updated port: 54384cee-a218-4fe5-93e6-ae4e84ce4756 {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1737.807100] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "refresh_cache-0210d9d4-2161-4b06-bc81-9de361accca6" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1737.807100] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 
tempest-DeleteServersTestJSON-1950766552-project-member] Acquired lock "refresh_cache-0210d9d4-2161-4b06-bc81-9de361accca6" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1737.807100] env[68040]: DEBUG nova.network.neutron [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1737.902128] env[68040]: DEBUG nova.network.neutron [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1738.410756] env[68040]: DEBUG nova.network.neutron [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Updating instance_info_cache with network_info: [{"id": "54384cee-a218-4fe5-93e6-ae4e84ce4756", "address": "fa:16:3e:2e:59:be", "network": {"id": "9565e3df-4a40-4611-a5a9-efd2bc66053b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-780365588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a2c3ee9bf1c40228a089e4b0e5bff00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54384cee-a2", "ovs_interfaceid": "54384cee-a218-4fe5-93e6-ae4e84ce4756", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1738.423729] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Releasing lock "refresh_cache-0210d9d4-2161-4b06-bc81-9de361accca6" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.424138] env[68040]: DEBUG nova.compute.manager [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Instance network_info: |[{"id": "54384cee-a218-4fe5-93e6-ae4e84ce4756", "address": "fa:16:3e:2e:59:be", "network": {"id": "9565e3df-4a40-4611-a5a9-efd2bc66053b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-780365588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a2c3ee9bf1c40228a089e4b0e5bff00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54384cee-a2", "ovs_interfaceid": "54384cee-a218-4fe5-93e6-ae4e84ce4756", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1738.424494] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:59:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54384cee-a218-4fe5-93e6-ae4e84ce4756', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1738.432449] env[68040]: DEBUG oslo.service.loopingcall [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1738.433053] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1738.433289] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb648605-c29a-4b07-ba8f-5290c6d60300 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.453063] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1738.453063] env[68040]: value = "task-3200316" [ 1738.453063] env[68040]: _type = "Task" [ 1738.453063] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.462305] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200316, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.547835] env[68040]: DEBUG nova.compute.manager [req-2cc2d584-044b-4d9a-b43d-dc7ad0e9ef53 req-fe4f2d1c-5bf4-44f4-b9a9-974cddf621a9 service nova] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Received event network-vif-plugged-54384cee-a218-4fe5-93e6-ae4e84ce4756 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1738.548112] env[68040]: DEBUG oslo_concurrency.lockutils [req-2cc2d584-044b-4d9a-b43d-dc7ad0e9ef53 req-fe4f2d1c-5bf4-44f4-b9a9-974cddf621a9 service nova] Acquiring lock "0210d9d4-2161-4b06-bc81-9de361accca6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.548334] env[68040]: DEBUG oslo_concurrency.lockutils [req-2cc2d584-044b-4d9a-b43d-dc7ad0e9ef53 req-fe4f2d1c-5bf4-44f4-b9a9-974cddf621a9 service nova] Lock "0210d9d4-2161-4b06-bc81-9de361accca6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1738.548618] env[68040]: DEBUG oslo_concurrency.lockutils [req-2cc2d584-044b-4d9a-b43d-dc7ad0e9ef53 req-fe4f2d1c-5bf4-44f4-b9a9-974cddf621a9 service nova] Lock "0210d9d4-2161-4b06-bc81-9de361accca6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1738.548809] env[68040]: DEBUG nova.compute.manager [req-2cc2d584-044b-4d9a-b43d-dc7ad0e9ef53 req-fe4f2d1c-5bf4-44f4-b9a9-974cddf621a9 service nova] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] No waiting events found dispatching network-vif-plugged-54384cee-a218-4fe5-93e6-ae4e84ce4756 {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1738.549020] env[68040]: WARNING nova.compute.manager [req-2cc2d584-044b-4d9a-b43d-dc7ad0e9ef53 req-fe4f2d1c-5bf4-44f4-b9a9-974cddf621a9 service nova] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Received unexpected event network-vif-plugged-54384cee-a218-4fe5-93e6-ae4e84ce4756 for instance with vm_state building and task_state spawning. [ 1738.549162] env[68040]: DEBUG nova.compute.manager [req-2cc2d584-044b-4d9a-b43d-dc7ad0e9ef53 req-fe4f2d1c-5bf4-44f4-b9a9-974cddf621a9 service nova] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Received event network-changed-54384cee-a218-4fe5-93e6-ae4e84ce4756 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1738.549307] env[68040]: DEBUG nova.compute.manager [req-2cc2d584-044b-4d9a-b43d-dc7ad0e9ef53 req-fe4f2d1c-5bf4-44f4-b9a9-974cddf621a9 service nova] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Refreshing instance network info cache due to event network-changed-54384cee-a218-4fe5-93e6-ae4e84ce4756. 
{{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1738.549521] env[68040]: DEBUG oslo_concurrency.lockutils [req-2cc2d584-044b-4d9a-b43d-dc7ad0e9ef53 req-fe4f2d1c-5bf4-44f4-b9a9-974cddf621a9 service nova] Acquiring lock "refresh_cache-0210d9d4-2161-4b06-bc81-9de361accca6" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1738.549658] env[68040]: DEBUG oslo_concurrency.lockutils [req-2cc2d584-044b-4d9a-b43d-dc7ad0e9ef53 req-fe4f2d1c-5bf4-44f4-b9a9-974cddf621a9 service nova] Acquired lock "refresh_cache-0210d9d4-2161-4b06-bc81-9de361accca6" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1738.549829] env[68040]: DEBUG nova.network.neutron [req-2cc2d584-044b-4d9a-b43d-dc7ad0e9ef53 req-fe4f2d1c-5bf4-44f4-b9a9-974cddf621a9 service nova] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Refreshing network info cache for port 54384cee-a218-4fe5-93e6-ae4e84ce4756 {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1738.820983] env[68040]: DEBUG nova.network.neutron [req-2cc2d584-044b-4d9a-b43d-dc7ad0e9ef53 req-fe4f2d1c-5bf4-44f4-b9a9-974cddf621a9 service nova] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Updated VIF entry in instance network info cache for port 54384cee-a218-4fe5-93e6-ae4e84ce4756. {{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1738.821347] env[68040]: DEBUG nova.network.neutron [req-2cc2d584-044b-4d9a-b43d-dc7ad0e9ef53 req-fe4f2d1c-5bf4-44f4-b9a9-974cddf621a9 service nova] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Updating instance_info_cache with network_info: [{"id": "54384cee-a218-4fe5-93e6-ae4e84ce4756", "address": "fa:16:3e:2e:59:be", "network": {"id": "9565e3df-4a40-4611-a5a9-efd2bc66053b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-780365588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a2c3ee9bf1c40228a089e4b0e5bff00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54384cee-a2", "ovs_interfaceid": "54384cee-a218-4fe5-93e6-ae4e84ce4756", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1738.830687] env[68040]: DEBUG oslo_concurrency.lockutils [req-2cc2d584-044b-4d9a-b43d-dc7ad0e9ef53 req-fe4f2d1c-5bf4-44f4-b9a9-974cddf621a9 service nova] Releasing lock "refresh_cache-0210d9d4-2161-4b06-bc81-9de361accca6" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.962704] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200316, 'name': CreateVM_Task, 'duration_secs': 0.267849} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.962862] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1738.963552] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1738.963718] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1738.964056] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1738.964305] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9e27a21-d155-450c-8f69-0fe02eccf4bc {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.968510] env[68040]: DEBUG oslo_vmware.api [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for the task: (returnval){ [ 1738.968510] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52f59faf-c613-3531-3e67-314dce9a4784" [ 1738.968510] env[68040]: _type = "Task" [ 1738.968510] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.975899] env[68040]: DEBUG oslo_vmware.api [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52f59faf-c613-3531-3e67-314dce9a4784, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.479139] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1739.479424] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1739.479644] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1749.983653] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1753.984568] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1756.983879] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1756.997499] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1756.997735] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1756.997916] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.998087] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1756.999291] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b6b8b5-4405-4f96-9168-78f9022ef3bb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.008886] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e62cca8c-209e-463f-a361-48fcef8b7445 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.023357] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13063e92-a157-45d1-9ebc-3d36e21a80e6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.029878] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a4dd39-4e4b-4abc-bf1b-e3a1b5ac8968 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.060509] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180990MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1757.060659] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1757.060864] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1757.135079] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c581d685-7ea0-41f8-b911-ff1dce1b46c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1757.135247] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4a08d3e3-5e84-4f34-b418-2c18eadbef25 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1757.135378] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 268b5613-b132-49ed-a45b-bc88132177cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1757.135530] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4dfa01f8-53a0-4ee4-9b00-93017144ea0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1757.135656] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1757.135776] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1757.135895] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 221a5bbe-7168-4f5c-ab49-8a149545655f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1757.136022] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 031481de-d52f-4f3f-80e5-0d0d6803d624 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1757.136146] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 2e44ead1-4676-4d9b-bbae-5082f505fc8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1757.136261] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0210d9d4-2161-4b06-bc81-9de361accca6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1757.147978] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8f9a6934-9ded-4561-8d83-aacd4d79f29a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1757.148215] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1757.148389] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1757.279550] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd5151bf-a25a-4a53-a744-6868748d52db {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.287546] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-604aec59-9513-444d-9a93-c99972437ebc {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.316574] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a598e35a-8278-4f5d-a8a1-f059af24c980 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.323548] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d62956-47f8-4eef-af5a-10203e03a558 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.336089] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1757.345546] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1757.361796] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1757.362017] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.301s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1758.363051] env[68040]: DEBUG oslo_service.periodic_task [None 
req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1758.363051] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1758.363051] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1758.382209] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1758.382377] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1758.382516] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1758.382645] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1758.382768] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1758.382891] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1758.383018] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1758.383143] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1758.383260] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Skipping network cache update for instance because it is Building. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1758.383405] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1758.383528] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1758.383973] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1758.384164] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1759.984857] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1760.984226] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1760.984466] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1761.979662] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1783.689649] env[68040]: WARNING oslo_vmware.rw_handles [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1783.689649] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1783.689649] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1783.689649] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1783.689649] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1783.689649] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 1783.689649] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1783.689649] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1783.689649] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1783.689649] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1783.689649] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1783.689649] env[68040]: ERROR oslo_vmware.rw_handles [ 1783.690432] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/ba54d36e-e14d-43da-beeb-a757240f76b3/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1783.692250] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1783.692595] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Copying Virtual Disk [datastore2] vmware_temp/ba54d36e-e14d-43da-beeb-a757240f76b3/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/ba54d36e-e14d-43da-beeb-a757240f76b3/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1783.692904] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-910c2cb5-e4a3-41b3-8932-a883b9dfe0ab {{(pid=68040) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.700598] env[68040]: DEBUG oslo_vmware.api [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Waiting for the task: (returnval){ [ 1783.700598] env[68040]: value = "task-3200317" [ 1783.700598] env[68040]: _type = "Task" [ 1783.700598] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.708115] env[68040]: DEBUG oslo_vmware.api [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Task: {'id': task-3200317, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.212049] env[68040]: DEBUG oslo_vmware.exceptions [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1784.212362] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1784.213051] env[68040]: ERROR nova.compute.manager [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1784.213051] env[68040]: Faults: ['InvalidArgument'] [ 1784.213051] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Traceback (most recent call last): [ 1784.213051] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1784.213051] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] yield resources [ 1784.213051] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1784.213051] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] self.driver.spawn(context, instance, image_meta, [ 1784.213051] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1784.213051] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1784.213051] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1784.213051] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] self._fetch_image_if_missing(context, vi) [ 1784.213051] env[68040]: ERROR nova.compute.manager [instance: 
c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1784.213051] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] image_cache(vi, tmp_image_ds_loc) [ 1784.213451] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1784.213451] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] vm_util.copy_virtual_disk( [ 1784.213451] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1784.213451] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] session._wait_for_task(vmdk_copy_task) [ 1784.213451] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1784.213451] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] return self.wait_for_task(task_ref) [ 1784.213451] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1784.213451] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] return evt.wait() [ 1784.213451] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1784.213451] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] result = hub.switch() [ 1784.213451] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1784.213451] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] return self.greenlet.switch() [ 1784.213451] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1784.213849] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] self.f(*self.args, **self.kw) [ 1784.213849] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1784.213849] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] raise exceptions.translate_fault(task_info.error) [ 1784.213849] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1784.213849] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Faults: ['InvalidArgument'] [ 1784.213849] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] [ 1784.213849] env[68040]: INFO nova.compute.manager [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 
c581d685-7ea0-41f8-b911-ff1dce1b46c7] Terminating instance [ 1784.215140] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1784.215354] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1784.215601] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f243e959-e64c-4982-adc9-20afb3973c58 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.217819] env[68040]: DEBUG nova.compute.manager [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1784.218027] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1784.218797] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99562c1b-547b-4b46-98e9-8f261a0bcedf {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.225875] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1784.226158] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-766c3a63-37a5-4368-9a2e-906fa1a3492b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.228488] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1784.228669] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1784.229676] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ad5dc9d-46cc-476b-b8b0-e689ff7b1162 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.234168] env[68040]: DEBUG oslo_vmware.api [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for the task: (returnval){ [ 1784.234168] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5258d717-20f0-a238-b681-be39c1b5b3f1" [ 1784.234168] env[68040]: _type = "Task" [ 1784.234168] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.241522] env[68040]: DEBUG oslo_vmware.api [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5258d717-20f0-a238-b681-be39c1b5b3f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.294649] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1784.294874] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1784.295060] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Deleting the datastore file [datastore2] c581d685-7ea0-41f8-b911-ff1dce1b46c7 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1784.295357] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4852710a-d870-4319-81c8-cbbe501122bd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.301242] env[68040]: DEBUG oslo_vmware.api [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Waiting for the task: (returnval){ [ 1784.301242] env[68040]: value = "task-3200319" [ 1784.301242] env[68040]: _type = "Task" [ 1784.301242] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.309428] env[68040]: DEBUG oslo_vmware.api [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Task: {'id': task-3200319, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.744135] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1784.744438] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Creating directory with path [datastore2] vmware_temp/94e657f0-5989-491a-9b7c-e8a203eb3e4b/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1784.744580] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ea81d82-5da1-4312-bef8-12d834d049f4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.755860] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Created directory with path [datastore2] vmware_temp/94e657f0-5989-491a-9b7c-e8a203eb3e4b/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1784.756074] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Fetch image to [datastore2] vmware_temp/94e657f0-5989-491a-9b7c-e8a203eb3e4b/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1784.756254] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/94e657f0-5989-491a-9b7c-e8a203eb3e4b/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1784.756980] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc4ae04-9734-4eda-8a43-4d5ecef466b7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.763452] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f9f83e8-eb62-4441-b643-363ad849197d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.772332] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f37bfa-d807-4819-9860-b9dfb919b175 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.806041] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6f94b3-fe2c-4d18-8a85-453bb12d8aed 
{{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.813300] env[68040]: DEBUG oslo_vmware.api [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Task: {'id': task-3200319, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080816} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.814678] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1784.814877] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1784.815058] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1784.815239] env[68040]: INFO nova.compute.manager [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Took 0.60 seconds to destroy the instance on the hypervisor. 
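The paired "Acquiring lock" / "acquired" / ""released"" DEBUG records that thread through this log come from oslo.concurrency's lockutils wrappers. A minimal sketch of the pattern, assuming only oslo.concurrency's public API (the function names here are illustrative, not Nova code):

    from oslo_concurrency import lockutils

    def do_work():
        pass  # placeholder for the critical section

    # Decorator form: the wrapper ("inner" in the log trailers) logs the
    # acquire with wait time (lockutils.py:402/407) and the release with
    # hold time (lockutils.py:421), as in the "compute_resources" records.
    @lockutils.synchronized('compute_resources')
    def update_tracker():
        do_work()

    # Context-manager form, matching the per-datastore image-cache lock
    # records ("Acquiring lock ..." / "Releasing lock ..." at lines 310/331):
    with lockutils.lock('[datastore2] devstack-image-cache_base'):
        do_work()

    update_tracker()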
[ 1784.816974] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-27790047-e8a6-45be-85bb-a1ca7b377379 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.818775] env[68040]: DEBUG nova.compute.claims [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1784.818949] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.819174] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.841740] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1784.901892] env[68040]: DEBUG oslo_vmware.rw_handles [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/94e657f0-5989-491a-9b7c-e8a203eb3e4b/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1784.970386] env[68040]: DEBUG oslo_vmware.rw_handles [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1784.970606] env[68040]: DEBUG oslo_vmware.rw_handles [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/94e657f0-5989-491a-9b7c-e8a203eb3e4b/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1785.061474] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-533eb899-7278-4fda-a8cf-a1f88847aae2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.068999] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-071e5b4e-fd26-45c5-90ca-2e839a8d76e9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.101916] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f872405b-7cd7-4787-9559-c2294e5c902f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.108992] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aef7163-d799-4578-bfd5-bfc43996d7e5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.122887] env[68040]: DEBUG nova.compute.provider_tree [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1785.131204] env[68040]: DEBUG nova.scheduler.client.report [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1785.147588] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.328s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1785.148143] env[68040]: ERROR nova.compute.manager [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1785.148143] env[68040]: Faults: ['InvalidArgument'] [ 1785.148143] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Traceback (most recent call last): [ 1785.148143] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1785.148143] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] 
self.driver.spawn(context, instance, image_meta, [ 1785.148143] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1785.148143] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1785.148143] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1785.148143] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] self._fetch_image_if_missing(context, vi) [ 1785.148143] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1785.148143] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] image_cache(vi, tmp_image_ds_loc) [ 1785.148143] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1785.148783] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] vm_util.copy_virtual_disk( [ 1785.148783] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1785.148783] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] session._wait_for_task(vmdk_copy_task) [ 1785.148783] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1785.148783] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] return self.wait_for_task(task_ref) [ 1785.148783] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1785.148783] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] return evt.wait() [ 1785.148783] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1785.148783] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] result = hub.switch() [ 1785.148783] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1785.148783] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] return self.greenlet.switch() [ 1785.148783] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1785.148783] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] self.f(*self.args, **self.kw) [ 1785.149441] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task 
[ 1785.149441] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] raise exceptions.translate_fault(task_info.error) [ 1785.149441] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1785.149441] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Faults: ['InvalidArgument'] [ 1785.149441] env[68040]: ERROR nova.compute.manager [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] [ 1785.149441] env[68040]: DEBUG nova.compute.utils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1785.150373] env[68040]: DEBUG nova.compute.manager [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Build of instance c581d685-7ea0-41f8-b911-ff1dce1b46c7 was re-scheduled: A specified parameter was not correct: fileType [ 1785.150373] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1785.150745] env[68040]: DEBUG nova.compute.manager [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1785.150921] env[68040]: DEBUG nova.compute.manager [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1785.151107] env[68040]: DEBUG nova.compute.manager [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1785.151273] env[68040]: DEBUG nova.network.neutron [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1785.439620] env[68040]: DEBUG nova.network.neutron [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.452067] env[68040]: INFO nova.compute.manager [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Took 0.30 seconds to deallocate network for instance. 
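The traceback above bottoms out in oslo.vmware's _poll_task raising a translated fault out of wait_for_task. A minimal sketch of that calling pattern, assuming oslo.vmware's public VMwareAPISession API; the helper name and the CopyVirtualDisk_Task keyword arguments are illustrative, not Nova's actual vm_util code:

    from oslo_vmware import exceptions as vexc

    def copy_disk(session, source_name, dest_name, datacenter):
        # Start the copy on vCenter's VirtualDiskManager; invoke_api
        # returns a task reference immediately.
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task',
            session.vim.service_content.virtualDiskManager,
            sourceName=source_name, sourceDatacenter=datacenter,
            destName=dest_name)
        try:
            # wait_for_task blocks in the _poll_task loop seen in the
            # log, raising the task error as a translated exception.
            return session.wait_for_task(task)
        except vexc.VimFaultException as e:
            # e.fault_list carries the VMware fault names, e.g.
            # ['InvalidArgument'] as in the failure above.
            raise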
[ 1785.552741] env[68040]: INFO nova.scheduler.client.report [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Deleted allocations for instance c581d685-7ea0-41f8-b911-ff1dce1b46c7 [ 1785.575045] env[68040]: DEBUG oslo_concurrency.lockutils [None req-f029ebc0-b839-4f4a-bb71-9a87b723c7e0 tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "c581d685-7ea0-41f8-b911-ff1dce1b46c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 673.409s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1785.576143] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12c94775-7ff4-4860-8da5-7478cc755a1d tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "c581d685-7ea0-41f8-b911-ff1dce1b46c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 476.318s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1785.576622] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12c94775-7ff4-4860-8da5-7478cc755a1d tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquiring lock "c581d685-7ea0-41f8-b911-ff1dce1b46c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1785.576672] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12c94775-7ff4-4860-8da5-7478cc755a1d tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "c581d685-7ea0-41f8-b911-ff1dce1b46c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1785.576867] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12c94775-7ff4-4860-8da5-7478cc755a1d tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "c581d685-7ea0-41f8-b911-ff1dce1b46c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1785.581008] env[68040]: INFO nova.compute.manager [None req-12c94775-7ff4-4860-8da5-7478cc755a1d tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Terminating instance [ 1785.582802] env[68040]: DEBUG nova.compute.manager [None req-12c94775-7ff4-4860-8da5-7478cc755a1d tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Start destroying the instance on the hypervisor. 
{{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1785.583017] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-12c94775-7ff4-4860-8da5-7478cc755a1d tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1785.583281] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-951b7891-b944-4856-94a3-fcd70d9cfe55 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.586390] env[68040]: DEBUG nova.compute.manager [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1785.595695] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456167f1-7ba1-44a0-826d-775bf4061b81 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.624957] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-12c94775-7ff4-4860-8da5-7478cc755a1d tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c581d685-7ea0-41f8-b911-ff1dce1b46c7 could not be found. [ 1785.625170] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-12c94775-7ff4-4860-8da5-7478cc755a1d tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1785.625384] env[68040]: INFO nova.compute.manager [None req-12c94775-7ff4-4860-8da5-7478cc755a1d tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1785.625652] env[68040]: DEBUG oslo.service.loopingcall [None req-12c94775-7ff4-4860-8da5-7478cc755a1d tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1785.626424] env[68040]: DEBUG nova.compute.manager [-] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1785.626530] env[68040]: DEBUG nova.network.neutron [-] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1785.642189] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1785.642449] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1785.643955] env[68040]: INFO nova.compute.claims [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1785.655815] env[68040]: DEBUG nova.network.neutron [-] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.665414] env[68040]: INFO nova.compute.manager [-] [instance: c581d685-7ea0-41f8-b911-ff1dce1b46c7] Took 0.04 seconds to deallocate network for instance. 
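The inventory dict repeated in the "Inventory has not changed" records maps onto placement's usual capacity rule, capacity = (total - reserved) * allocation_ratio. A worked example over the values logged for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 (min_unit/max_unit/step_size omitted for brevity):

    # Resource-class inventory as reported in the log records above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)

    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0 -- so the 10 vCPUs
    # allocated in the "Final resource view" fit comfortably.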
[ 1785.744568] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12c94775-7ff4-4860-8da5-7478cc755a1d tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "c581d685-7ea0-41f8-b911-ff1dce1b46c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.168s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1785.815295] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f19853c-7f99-483d-956d-66d63ef96482 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.822526] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0099826c-19d1-413c-99d1-1b32404ad6a7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.851292] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d050922e-b3fb-42c2-8598-aa7678c92fb4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.858255] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb03b15-2ca5-4800-a694-5b4ea2145fc7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.871013] env[68040]: DEBUG nova.compute.provider_tree [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1785.879883] env[68040]: DEBUG nova.scheduler.client.report [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1785.892890] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.250s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1785.893261] env[68040]: DEBUG nova.compute.manager [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Start building networks asynchronously for instance. 
{{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1785.925238] env[68040]: DEBUG nova.compute.utils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1785.926609] env[68040]: DEBUG nova.compute.manager [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1785.926786] env[68040]: DEBUG nova.network.neutron [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1785.935584] env[68040]: DEBUG nova.compute.manager [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1785.981461] env[68040]: DEBUG nova.policy [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '652d6d4ab4b34f1c83b383eebb03802d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b900f6609c8b4a06a854334970969e08', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 1786.002905] env[68040]: DEBUG nova.compute.manager [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Start spawning the instance on the hypervisor. 
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1786.028288] env[68040]: DEBUG nova.virt.hardware [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1786.028536] env[68040]: DEBUG nova.virt.hardware [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1786.028697] env[68040]: DEBUG nova.virt.hardware [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1786.028882] env[68040]: DEBUG nova.virt.hardware [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1786.029039] env[68040]: DEBUG nova.virt.hardware [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1786.029191] env[68040]: DEBUG nova.virt.hardware [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1786.029402] env[68040]: DEBUG nova.virt.hardware [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1786.029571] env[68040]: DEBUG nova.virt.hardware [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1786.029741] env[68040]: DEBUG 
nova.virt.hardware [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1786.029907] env[68040]: DEBUG nova.virt.hardware [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1786.030101] env[68040]: DEBUG nova.virt.hardware [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1786.031156] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfdbe654-7e70-41fa-8fc9-48168fb2d8f5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.038810] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdf2cfd5-9e71-4473-be45-ee535c2f3a06 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.529176] env[68040]: DEBUG nova.network.neutron [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Successfully created port: 6b9854c6-e640-4727-a01f-7b3035dbd436 {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1787.232260] env[68040]: DEBUG nova.network.neutron [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Successfully updated port: 6b9854c6-e640-4727-a01f-7b3035dbd436 {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1787.245098] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Acquiring lock "refresh_cache-8f9a6934-9ded-4561-8d83-aacd4d79f29a" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.245267] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Acquired lock "refresh_cache-8f9a6934-9ded-4561-8d83-aacd4d79f29a" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.245424] env[68040]: DEBUG nova.network.neutron [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1787.304072] env[68040]: DEBUG nova.network.neutron [None 
req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1787.803513] env[68040]: DEBUG nova.network.neutron [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Updating instance_info_cache with network_info: [{"id": "6b9854c6-e640-4727-a01f-7b3035dbd436", "address": "fa:16:3e:9f:23:b5", "network": {"id": "8ce695dd-c82f-410f-a4e5-f236089de141", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-716880134-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b900f6609c8b4a06a854334970969e08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b9854c6-e6", "ovs_interfaceid": "6b9854c6-e640-4727-a01f-7b3035dbd436", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1787.816803] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Releasing lock "refresh_cache-8f9a6934-9ded-4561-8d83-aacd4d79f29a" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1787.817096] env[68040]: DEBUG nova.compute.manager [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Instance network_info: |[{"id": "6b9854c6-e640-4727-a01f-7b3035dbd436", "address": "fa:16:3e:9f:23:b5", "network": {"id": "8ce695dd-c82f-410f-a4e5-f236089de141", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-716880134-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b900f6609c8b4a06a854334970969e08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b9854c6-e6", "ovs_interfaceid": 
"6b9854c6-e640-4727-a01f-7b3035dbd436", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1787.817511] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:23:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6b9854c6-e640-4727-a01f-7b3035dbd436', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1787.824979] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Creating folder: Project (b900f6609c8b4a06a854334970969e08). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1787.825852] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5844547a-32c5-4f80-b743-e96a021d18a2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.836429] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Created folder: Project (b900f6609c8b4a06a854334970969e08) in parent group-v639956. [ 1787.836603] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Creating folder: Instances. Parent ref: group-v640052. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1787.836833] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3847842c-42db-466e-99f4-46cc495b1df2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.844384] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Created folder: Instances in parent group-v640052. [ 1787.844522] env[68040]: DEBUG oslo.service.loopingcall [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1787.844706] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1787.844877] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-69518b02-09cf-4551-b066-f2e73ded59ba {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.862396] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1787.862396] env[68040]: value = "task-3200322" [ 1787.862396] env[68040]: _type = "Task" [ 1787.862396] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.869499] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200322, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.925043] env[68040]: DEBUG nova.compute.manager [req-5f26f240-800f-4fc6-afa8-9158712efad9 req-11a4498a-b787-49cb-bf51-d448b90ca47b service nova] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Received event network-vif-plugged-6b9854c6-e640-4727-a01f-7b3035dbd436 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1787.925262] env[68040]: DEBUG oslo_concurrency.lockutils [req-5f26f240-800f-4fc6-afa8-9158712efad9 req-11a4498a-b787-49cb-bf51-d448b90ca47b service nova] Acquiring lock "8f9a6934-9ded-4561-8d83-aacd4d79f29a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.925543] env[68040]: DEBUG oslo_concurrency.lockutils [req-5f26f240-800f-4fc6-afa8-9158712efad9 req-11a4498a-b787-49cb-bf51-d448b90ca47b service nova] Lock "8f9a6934-9ded-4561-8d83-aacd4d79f29a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.925725] env[68040]: DEBUG oslo_concurrency.lockutils [req-5f26f240-800f-4fc6-afa8-9158712efad9 req-11a4498a-b787-49cb-bf51-d448b90ca47b service nova] Lock "8f9a6934-9ded-4561-8d83-aacd4d79f29a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.925812] env[68040]: DEBUG nova.compute.manager [req-5f26f240-800f-4fc6-afa8-9158712efad9 req-11a4498a-b787-49cb-bf51-d448b90ca47b service nova] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] No waiting events found dispatching network-vif-plugged-6b9854c6-e640-4727-a01f-7b3035dbd436 {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1787.925977] env[68040]: WARNING nova.compute.manager [req-5f26f240-800f-4fc6-afa8-9158712efad9 req-11a4498a-b787-49cb-bf51-d448b90ca47b service nova] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Received unexpected event network-vif-plugged-6b9854c6-e640-4727-a01f-7b3035dbd436 for instance with vm_state building and task_state spawning.
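The Folder.CreateVM_Task invocation and the "progress is 0%" lines above are oslo.vmware's task round-trip: the driver fires the vSphere task via invoke_api, then blocks in wait_for_task, which polls vCenter at task_poll_interval until the task succeeds or faults. A minimal sketch of that call pattern, assuming the folder, resource pool, and config spec were already resolved (VMwareAPISession, invoke_api, and wait_for_task are the real oslo.vmware entry points; the endpoint, credentials, and managed object references are placeholders):

    # Sketch of the CreateVM_Task / wait_for_task round-trip.
    from oslo_vmware import api

    def make_session():
        # Placeholder endpoint and credentials; task_poll_interval is the
        # cadence of the "progress is N%" polling lines.
        return api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                    api_retry_count=3,
                                    task_poll_interval=0.5)

    def create_vm(session, vm_folder_ref, res_pool_ref, config_spec):
        # Fire the vSphere task, then block until vCenter reports a result.
        task = session.invoke_api(session.vim, 'CreateVM_Task',
                                  vm_folder_ref,
                                  config=config_spec, pool=res_pool_ref)
        task_info = session.wait_for_task(task)
        return task_info.result  # managed object reference of the new VM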
[ 1787.926157] env[68040]: DEBUG nova.compute.manager [req-5f26f240-800f-4fc6-afa8-9158712efad9 req-11a4498a-b787-49cb-bf51-d448b90ca47b service nova] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Received event network-changed-6b9854c6-e640-4727-a01f-7b3035dbd436 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1787.926312] env[68040]: DEBUG nova.compute.manager [req-5f26f240-800f-4fc6-afa8-9158712efad9 req-11a4498a-b787-49cb-bf51-d448b90ca47b service nova] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Refreshing instance network info cache due to event network-changed-6b9854c6-e640-4727-a01f-7b3035dbd436. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1787.926492] env[68040]: DEBUG oslo_concurrency.lockutils [req-5f26f240-800f-4fc6-afa8-9158712efad9 req-11a4498a-b787-49cb-bf51-d448b90ca47b service nova] Acquiring lock "refresh_cache-8f9a6934-9ded-4561-8d83-aacd4d79f29a" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.926631] env[68040]: DEBUG oslo_concurrency.lockutils [req-5f26f240-800f-4fc6-afa8-9158712efad9 req-11a4498a-b787-49cb-bf51-d448b90ca47b service nova] Acquired lock "refresh_cache-8f9a6934-9ded-4561-8d83-aacd4d79f29a" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.926814] env[68040]: DEBUG nova.network.neutron [req-5f26f240-800f-4fc6-afa8-9158712efad9 req-11a4498a-b787-49cb-bf51-d448b90ca47b service nova] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Refreshing network info cache for port 6b9854c6-e640-4727-a01f-7b3035dbd436 {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1788.253803] env[68040]: DEBUG nova.network.neutron [req-5f26f240-800f-4fc6-afa8-9158712efad9 req-11a4498a-b787-49cb-bf51-d448b90ca47b service nova] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Updated VIF entry in instance network info cache for port 6b9854c6-e640-4727-a01f-7b3035dbd436. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1788.254269] env[68040]: DEBUG nova.network.neutron [req-5f26f240-800f-4fc6-afa8-9158712efad9 req-11a4498a-b787-49cb-bf51-d448b90ca47b service nova] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Updating instance_info_cache with network_info: [{"id": "6b9854c6-e640-4727-a01f-7b3035dbd436", "address": "fa:16:3e:9f:23:b5", "network": {"id": "8ce695dd-c82f-410f-a4e5-f236089de141", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-716880134-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b900f6609c8b4a06a854334970969e08", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b9854c6-e6", "ovs_interfaceid": "6b9854c6-e640-4727-a01f-7b3035dbd436", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1788.264076] env[68040]: DEBUG oslo_concurrency.lockutils [req-5f26f240-800f-4fc6-afa8-9158712efad9 req-11a4498a-b787-49cb-bf51-d448b90ca47b service nova] Releasing lock "refresh_cache-8f9a6934-9ded-4561-8d83-aacd4d79f29a" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1788.372200] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200322, 'name': CreateVM_Task, 'duration_secs': 0.292745} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.372359] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1788.373084] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1788.373253] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1788.373698] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1788.373985] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6de04c6a-8953-4782-913c-0b539b92be05 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.378207] env[68040]: DEBUG oslo_vmware.api [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Waiting for the task: (returnval){ [ 1788.378207] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]528ebf69-ed38-e337-ca19-53fe49fa10d5" [ 1788.378207] env[68040]: _type = "Task" [ 1788.378207] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.385475] env[68040]: DEBUG oslo_vmware.api [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]528ebf69-ed38-e337-ca19-53fe49fa10d5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.888046] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1788.888278] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1788.888492] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1791.841314] env[68040]: DEBUG oslo_concurrency.lockutils [None req-32eb3e75-d6d3-4bc4-95a0-2b133d158a51 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "0210d9d4-2161-4b06-bc81-9de361accca6" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.984562] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1813.985164] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1817.984513] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1817.984858] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1817.984858] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1818.008165] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Skipping network cache update for instance because it is Building.
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1818.008336] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1818.008470] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1818.008597] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1818.008721] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1818.008845] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1818.008967] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1818.009154] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1818.009312] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1818.009434] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1818.009555] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1818.984153] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1818.984471] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1818.996726] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.997159] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.997328] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.997554] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1818.998762] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8480ebd9-fdca-4fab-b376-aeaa9e83c227 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.007897] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2650b183-45a5-4fff-a7a0-e7b8fa817a40 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.024118] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-634f694a-2284-4dd8-9428-a5fba3e5d243 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.030902] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6808ad78-257b-4bb9-bec0-8e45dc2114fe {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.059275] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181002MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1819.059497] env[68040]: DEBUG 
oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.059772] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.203169] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4a08d3e3-5e84-4f34-b418-2c18eadbef25 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1819.203387] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 268b5613-b132-49ed-a45b-bc88132177cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1819.203532] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4dfa01f8-53a0-4ee4-9b00-93017144ea0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1819.203711] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1819.203842] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1819.203975] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 221a5bbe-7168-4f5c-ab49-8a149545655f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1819.204109] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 031481de-d52f-4f3f-80e5-0d0d6803d624 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1819.204255] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 2e44ead1-4676-4d9b-bbae-5082f505fc8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1819.204373] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0210d9d4-2161-4b06-bc81-9de361accca6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1819.204492] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8f9a6934-9ded-4561-8d83-aacd4d79f29a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1819.204690] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1819.204831] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1819.331397] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947c8795-8da0-43db-8ba2-1a12a725fd53 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.339219] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06aff06-5ff8-4db3-bcde-ff5430a81869 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.368192] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09f40007-f761-4624-beba-88a21db8f769 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.375436] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6df908b-cc87-4230-9dea-b9f2ebe59dc9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.388165] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1819.396443] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 
22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1819.410072] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1819.410267] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.351s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.410309] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1820.984140] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1820.984140] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1820.984140] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1823.979562] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1823.983251] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1823.983459] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Cleaning up deleted instances {{(pid=68040) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1823.992811] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] There are 0 instances to clean {{(pid=68040) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1823.993055] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1823.993237] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Cleaning up deleted instances with incomplete migration {{(pid=68040) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1826.999511] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1827.985073] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1834.931249] env[68040]: WARNING oslo_vmware.rw_handles [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1834.931249] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1834.931249] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1834.931249] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1834.931249] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1834.931249] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 1834.931249] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1834.931249] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1834.931249] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1834.931249] env[68040]: ERROR 
oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1834.931249] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1834.931249] env[68040]: ERROR oslo_vmware.rw_handles [ 1834.931878] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/94e657f0-5989-491a-9b7c-e8a203eb3e4b/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1834.933776] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1834.934044] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Copying Virtual Disk [datastore2] vmware_temp/94e657f0-5989-491a-9b7c-e8a203eb3e4b/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/94e657f0-5989-491a-9b7c-e8a203eb3e4b/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1834.934346] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba791e6f-4380-4e88-861a-e8138b5b19b0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.942421] env[68040]: DEBUG oslo_vmware.api [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for the task: (returnval){ [ 1834.942421] env[68040]: value = "task-3200323" [ 1834.942421] env[68040]: _type = "Task" [ 1834.942421] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.950294] env[68040]: DEBUG oslo_vmware.api [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Task: {'id': task-3200323, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.148354] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1835.168263] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Getting list of instances from cluster (obj){ [ 1835.168263] env[68040]: value = "domain-c8" [ 1835.168263] env[68040]: _type = "ClusterComputeResource" [ 1835.168263] env[68040]: } {{(pid=68040) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1835.169516] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-434638d1-a2be-47e5-bd49-fd3062196dd5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.186048] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Got total of 10 instances {{(pid=68040) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1835.186216] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Triggering sync for uuid 4a08d3e3-5e84-4f34-b418-2c18eadbef25 {{(pid=68040) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1835.186408] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Triggering sync for uuid 268b5613-b132-49ed-a45b-bc88132177cf {{(pid=68040) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1835.186570] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Triggering sync for uuid 4dfa01f8-53a0-4ee4-9b00-93017144ea0b {{(pid=68040) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1835.186722] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Triggering sync for uuid f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2 {{(pid=68040) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1835.186876] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Triggering sync for uuid c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a {{(pid=68040) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1835.187038] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Triggering sync for uuid 221a5bbe-7168-4f5c-ab49-8a149545655f {{(pid=68040) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1835.187199] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Triggering sync for uuid 031481de-d52f-4f3f-80e5-0d0d6803d624 {{(pid=68040) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1835.187348] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Triggering sync for uuid 2e44ead1-4676-4d9b-bbae-5082f505fc8b {{(pid=68040) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1835.187495] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Triggering sync for uuid 0210d9d4-2161-4b06-bc81-9de361accca6 {{(pid=68040) _sync_power_states 
/opt/stack/nova/nova/compute/manager.py:10329}} [ 1835.187641] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Triggering sync for uuid 8f9a6934-9ded-4561-8d83-aacd4d79f29a {{(pid=68040) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1835.187944] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "4a08d3e3-5e84-4f34-b418-2c18eadbef25" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.188189] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "268b5613-b132-49ed-a45b-bc88132177cf" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.188393] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "4dfa01f8-53a0-4ee4-9b00-93017144ea0b" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.188590] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.188800] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.189007] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "221a5bbe-7168-4f5c-ab49-8a149545655f" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.189222] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "031481de-d52f-4f3f-80e5-0d0d6803d624" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.189417] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "2e44ead1-4676-4d9b-bbae-5082f505fc8b" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.189609] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "0210d9d4-2161-4b06-bc81-9de361accca6" by
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.189798] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "8f9a6934-9ded-4561-8d83-aacd4d79f29a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.452243] env[68040]: DEBUG oslo_vmware.exceptions [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1835.452523] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1835.453102] env[68040]: ERROR nova.compute.manager [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1835.453102] env[68040]: Faults: ['InvalidArgument'] [ 1835.453102] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Traceback (most recent call last): [ 1835.453102] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1835.453102] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] yield resources [ 1835.453102] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1835.453102] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] self.driver.spawn(context, instance, image_meta, [ 1835.453102] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1835.453102] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1835.453102] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1835.453102] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] self._fetch_image_if_missing(context, vi) [ 1835.453102] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1835.453505] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] image_cache(vi, 
tmp_image_ds_loc) [ 1835.453505] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1835.453505] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] vm_util.copy_virtual_disk( [ 1835.453505] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1835.453505] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] session._wait_for_task(vmdk_copy_task) [ 1835.453505] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1835.453505] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] return self.wait_for_task(task_ref) [ 1835.453505] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1835.453505] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] return evt.wait() [ 1835.453505] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1835.453505] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] result = hub.switch() [ 1835.453505] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1835.453505] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] return self.greenlet.switch() [ 1835.453916] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1835.453916] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] self.f(*self.args, **self.kw) [ 1835.453916] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1835.453916] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] raise exceptions.translate_fault(task_info.error) [ 1835.453916] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1835.453916] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Faults: ['InvalidArgument'] [ 1835.453916] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] [ 1835.453916] env[68040]: INFO nova.compute.manager [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Terminating instance [ 1835.454929] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 
tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1835.455158] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1835.455378] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-425f087f-8ef5-444d-9a66-c5b1f24aa221 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.457602] env[68040]: DEBUG nova.compute.manager [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1835.457795] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1835.458499] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bcc1390-e081-4b91-ac97-2c3390576045 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.464991] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1835.465210] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c16043d-ff5f-40c6-bd9f-07b87be3658f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.467174] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1835.467349] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1835.468244] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a03a9780-a1db-4637-a575-ebd602bfac21 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.472820] env[68040]: DEBUG oslo_vmware.api [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Waiting for the task: (returnval){ [ 1835.472820] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52dd2f22-3211-e3ce-6724-678bb9c4787a" [ 1835.472820] env[68040]: _type = "Task" [ 1835.472820] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.479642] env[68040]: DEBUG oslo_vmware.api [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52dd2f22-3211-e3ce-6724-678bb9c4787a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.536758] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1835.536967] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1835.537173] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Deleting the datastore file [datastore2] 4a08d3e3-5e84-4f34-b418-2c18eadbef25 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1835.537438] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b5f3451-67ef-4d00-9cc2-fd0ad247b09a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.544504] env[68040]: DEBUG oslo_vmware.api [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for the task: (returnval){ [ 1835.544504] env[68040]: value = "task-3200325" [ 1835.544504] env[68040]: _type = "Task" [ 1835.544504] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.552013] env[68040]: DEBUG oslo_vmware.api [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Task: {'id': task-3200325, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.983376] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1835.983705] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Creating directory with path [datastore2] vmware_temp/4343e44a-385d-4048-9c1c-4919b9f0dd67/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1835.983872] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45b932fa-b4b9-4100-8578-cc345cbe7fe2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.993764] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Created directory with path [datastore2] vmware_temp/4343e44a-385d-4048-9c1c-4919b9f0dd67/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1835.993943] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Fetch image to [datastore2] vmware_temp/4343e44a-385d-4048-9c1c-4919b9f0dd67/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1835.994128] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/4343e44a-385d-4048-9c1c-4919b9f0dd67/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1835.994838] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b158059-f7a5-400d-b660-dad7ef4e8b4f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.001216] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-240cc3f2-f115-45f5-a94f-dedf9f99597a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.011292] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ff8089-ad8a-4c2a-afc5-0ab9dce509e9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.041227] env[68040]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd1cf30-3a69-4495-89db-9ce87b5f5900 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.049815] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b46137c4-b47c-4ed7-9418-857e657cafb2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.054106] env[68040]: DEBUG oslo_vmware.api [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Task: {'id': task-3200325, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065626} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.054622] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1836.054812] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1836.054989] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1836.055208] env[68040]: INFO nova.compute.manager [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1836.057301] env[68040]: DEBUG nova.compute.claims [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1836.057473] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.057686] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.077065] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1836.127513] env[68040]: DEBUG oslo_vmware.rw_handles [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4343e44a-385d-4048-9c1c-4919b9f0dd67/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1836.188945] env[68040]: DEBUG oslo_vmware.rw_handles [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1836.189167] env[68040]: DEBUG oslo_vmware.rw_handles [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4343e44a-385d-4048-9c1c-4919b9f0dd67/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1836.277587] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecfe1944-5e74-4598-abcc-0049eafebdda {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.285249] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adc4998d-d38e-4961-bdce-7e9ff478f75c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.315135] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f87a82d3-b25b-4987-83bf-58a934c38541 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.321927] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a494dca-bd8c-44e8-8a07-61380a29d30d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.334775] env[68040]: DEBUG nova.compute.provider_tree [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1836.343335] env[68040]: DEBUG nova.scheduler.client.report [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1836.356380] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.299s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.356888] env[68040]: ERROR nova.compute.manager [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1836.356888] env[68040]: Faults: ['InvalidArgument'] [ 1836.356888] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Traceback (most recent call last): [ 1836.356888] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1836.356888] env[68040]: ERROR 
nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] self.driver.spawn(context, instance, image_meta, [ 1836.356888] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1836.356888] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1836.356888] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1836.356888] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] self._fetch_image_if_missing(context, vi) [ 1836.356888] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1836.356888] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] image_cache(vi, tmp_image_ds_loc) [ 1836.356888] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1836.357249] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] vm_util.copy_virtual_disk( [ 1836.357249] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1836.357249] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] session._wait_for_task(vmdk_copy_task) [ 1836.357249] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1836.357249] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] return self.wait_for_task(task_ref) [ 1836.357249] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1836.357249] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] return evt.wait() [ 1836.357249] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1836.357249] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] result = hub.switch() [ 1836.357249] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1836.357249] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] return self.greenlet.switch() [ 1836.357249] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1836.357249] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] self.f(*self.args, **self.kw) [ 1836.357632] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1836.357632] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] raise exceptions.translate_fault(task_info.error) [ 1836.357632] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1836.357632] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Faults: ['InvalidArgument'] [ 1836.357632] env[68040]: ERROR nova.compute.manager [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] [ 1836.357632] env[68040]: DEBUG nova.compute.utils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1836.358875] env[68040]: DEBUG nova.compute.manager [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Build of instance 4a08d3e3-5e84-4f34-b418-2c18eadbef25 was re-scheduled: A specified parameter was not correct: fileType [ 1836.358875] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1836.359260] env[68040]: DEBUG nova.compute.manager [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1836.359434] env[68040]: DEBUG nova.compute.manager [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1836.359605] env[68040]: DEBUG nova.compute.manager [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1836.359772] env[68040]: DEBUG nova.network.neutron [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1836.846956] env[68040]: DEBUG nova.network.neutron [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1836.862330] env[68040]: INFO nova.compute.manager [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Took 0.50 seconds to deallocate network for instance. [ 1836.964238] env[68040]: INFO nova.scheduler.client.report [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Deleted allocations for instance 4a08d3e3-5e84-4f34-b418-2c18eadbef25 [ 1836.984937] env[68040]: DEBUG oslo_concurrency.lockutils [None req-1ead069c-ae06-4d50-8642-5efbc80b7408 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "4a08d3e3-5e84-4f34-b418-2c18eadbef25" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 633.737s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.985239] env[68040]: DEBUG oslo_concurrency.lockutils [None req-5bde59f3-ebee-405d-8b32-34c2acad2712 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "4a08d3e3-5e84-4f34-b418-2c18eadbef25" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 438.640s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.985413] env[68040]: DEBUG oslo_concurrency.lockutils [None req-5bde59f3-ebee-405d-8b32-34c2acad2712 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "4a08d3e3-5e84-4f34-b418-2c18eadbef25-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.985621] env[68040]: DEBUG oslo_concurrency.lockutils [None req-5bde59f3-ebee-405d-8b32-34c2acad2712 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "4a08d3e3-5e84-4f34-b418-2c18eadbef25-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s
{{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.985791] env[68040]: DEBUG oslo_concurrency.lockutils [None req-5bde59f3-ebee-405d-8b32-34c2acad2712 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "4a08d3e3-5e84-4f34-b418-2c18eadbef25-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.987710] env[68040]: INFO nova.compute.manager [None req-5bde59f3-ebee-405d-8b32-34c2acad2712 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Terminating instance [ 1836.989340] env[68040]: DEBUG nova.compute.manager [None req-5bde59f3-ebee-405d-8b32-34c2acad2712 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1836.989537] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-5bde59f3-ebee-405d-8b32-34c2acad2712 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1836.989987] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-86f0b115-30df-4c9f-8739-b2bd333d6472 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.998836] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07003a9f-3b59-4246-8e21-3816e20efa06 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.028057] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-5bde59f3-ebee-405d-8b32-34c2acad2712 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4a08d3e3-5e84-4f34-b418-2c18eadbef25 could not be found. [ 1837.028272] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-5bde59f3-ebee-405d-8b32-34c2acad2712 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1837.028452] env[68040]: INFO nova.compute.manager [None req-5bde59f3-ebee-405d-8b32-34c2acad2712 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1837.028865] env[68040]: DEBUG oslo.service.loopingcall [None req-5bde59f3-ebee-405d-8b32-34c2acad2712 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1837.028908] env[68040]: DEBUG nova.compute.manager [-] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1837.028994] env[68040]: DEBUG nova.network.neutron [-] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1837.054282] env[68040]: DEBUG nova.network.neutron [-] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1837.062715] env[68040]: INFO nova.compute.manager [-] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] Took 0.03 seconds to deallocate network for instance. [ 1837.156947] env[68040]: DEBUG oslo_concurrency.lockutils [None req-5bde59f3-ebee-405d-8b32-34c2acad2712 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "4a08d3e3-5e84-4f34-b418-2c18eadbef25" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.172s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.161020] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "4a08d3e3-5e84-4f34-b418-2c18eadbef25" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 1.970s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1837.161020] env[68040]: INFO nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4a08d3e3-5e84-4f34-b418-2c18eadbef25] During sync_power_state the instance has a pending task (deleting). Skip.
[ 1837.161020] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "4a08d3e3-5e84-4f34-b418-2c18eadbef25" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.124934] env[68040]: DEBUG oslo_concurrency.lockutils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquiring lock "8a1a6866-1439-4f82-9fda-a7d9a7f211a3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.124934] env[68040]: DEBUG oslo_concurrency.lockutils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Lock "8a1a6866-1439-4f82-9fda-a7d9a7f211a3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.137936] env[68040]: DEBUG nova.compute.manager [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1864.206127] env[68040]: DEBUG oslo_concurrency.lockutils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.206402] env[68040]: DEBUG oslo_concurrency.lockutils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.207920] env[68040]: INFO nova.compute.claims [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1864.391196] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c0d8e1-8070-4267-be27-ddce7245e805 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.399226] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fab5f3f-328d-4fc1-a071-4f04ba5e28ea {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.428053] env[68040]: DEBUG oslo_vmware.service [-] Invoking
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-540756bd-550e-4a01-a824-64b243eee70d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.434709] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbf57d41-6bd7-4375-9509-51b8b1da8fd6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.448172] env[68040]: DEBUG nova.compute.provider_tree [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1864.456804] env[68040]: DEBUG nova.scheduler.client.report [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1864.473552] env[68040]: DEBUG oslo_concurrency.lockutils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.267s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.474030] env[68040]: DEBUG nova.compute.manager [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1864.505472] env[68040]: DEBUG nova.compute.utils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1864.506802] env[68040]: DEBUG nova.compute.manager [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Allocating IP information in the background. 
{{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1864.506974] env[68040]: DEBUG nova.network.neutron [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1864.514847] env[68040]: DEBUG nova.compute.manager [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1864.560113] env[68040]: DEBUG nova.policy [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d827affb8fa4ee6abe00918076b629e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba36e75b6181468a80999043bb27346c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 1864.578669] env[68040]: DEBUG nova.compute.manager [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Start spawning the instance on the hypervisor. 
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1864.603642] env[68040]: DEBUG nova.virt.hardware [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1864.603879] env[68040]: DEBUG nova.virt.hardware [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1864.604088] env[68040]: DEBUG nova.virt.hardware [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1864.604305] env[68040]: DEBUG nova.virt.hardware [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1864.604444] env[68040]: DEBUG nova.virt.hardware [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1864.604594] env[68040]: DEBUG nova.virt.hardware [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1864.604807] env[68040]: DEBUG nova.virt.hardware [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1864.604972] env[68040]: DEBUG nova.virt.hardware [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1864.605158] env[68040]: DEBUG nova.virt.hardware [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1864.605327] env[68040]: DEBUG nova.virt.hardware [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1864.605502] env[68040]: DEBUG nova.virt.hardware [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1864.606367] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4dba8e8-4104-46df-90c9-077c7def53e8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.614221] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db66ab65-edf6-4ef7-8a52-c06d11ece8bf {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.924166] env[68040]: DEBUG nova.network.neutron [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Successfully created port: 0af3579d-ccd2-4b83-a4df-c3fb0cf9ec31 {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1865.875706] env[68040]: DEBUG nova.compute.manager [req-9b714991-bb65-40b7-9545-b0210c0650b7 req-41d2ec32-cce4-44f3-8b58-fab81200e795 service nova] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Received event network-vif-plugged-0af3579d-ccd2-4b83-a4df-c3fb0cf9ec31 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1865.876000] env[68040]: DEBUG oslo_concurrency.lockutils [req-9b714991-bb65-40b7-9545-b0210c0650b7 req-41d2ec32-cce4-44f3-8b58-fab81200e795 service nova] Acquiring lock "8a1a6866-1439-4f82-9fda-a7d9a7f211a3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.876136] env[68040]: DEBUG oslo_concurrency.lockutils [req-9b714991-bb65-40b7-9545-b0210c0650b7 req-41d2ec32-cce4-44f3-8b58-fab81200e795 service nova] Lock "8a1a6866-1439-4f82-9fda-a7d9a7f211a3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.876311] env[68040]: DEBUG oslo_concurrency.lockutils [req-9b714991-bb65-40b7-9545-b0210c0650b7 req-41d2ec32-cce4-44f3-8b58-fab81200e795 service nova] Lock "8a1a6866-1439-4f82-9fda-a7d9a7f211a3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68040) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.876475] env[68040]: DEBUG nova.compute.manager [req-9b714991-bb65-40b7-9545-b0210c0650b7 req-41d2ec32-cce4-44f3-8b58-fab81200e795 service nova] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] No waiting events found dispatching network-vif-plugged-0af3579d-ccd2-4b83-a4df-c3fb0cf9ec31 {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1865.876651] env[68040]: WARNING nova.compute.manager [req-9b714991-bb65-40b7-9545-b0210c0650b7 req-41d2ec32-cce4-44f3-8b58-fab81200e795 service nova] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Received unexpected event network-vif-plugged-0af3579d-ccd2-4b83-a4df-c3fb0cf9ec31 for instance with vm_state building and task_state spawning. [ 1865.987107] env[68040]: DEBUG nova.network.neutron [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Successfully updated port: 0af3579d-ccd2-4b83-a4df-c3fb0cf9ec31 {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1865.997741] env[68040]: DEBUG oslo_concurrency.lockutils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquiring lock "refresh_cache-8a1a6866-1439-4f82-9fda-a7d9a7f211a3" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1865.997890] env[68040]: DEBUG oslo_concurrency.lockutils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquired lock "refresh_cache-8a1a6866-1439-4f82-9fda-a7d9a7f211a3" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1865.998052] env[68040]: DEBUG nova.network.neutron [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1866.044641] env[68040]: DEBUG nova.network.neutron [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Instance cache missing network info. 
[ 1866.251078] env[68040]: DEBUG nova.network.neutron [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Updating instance_info_cache with network_info: [{"id": "0af3579d-ccd2-4b83-a4df-c3fb0cf9ec31", "address": "fa:16:3e:a3:d6:8b", "network": {"id": "8ab8aca2-f199-4056-a7a7-9df3bd781a4a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-576267632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba36e75b6181468a80999043bb27346c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a4d142-3f97-47fe-b074-58923c46815e", "external-id": "nsx-vlan-transportzone-565", "segmentation_id": 565, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0af3579d-cc", "ovs_interfaceid": "0af3579d-ccd2-4b83-a4df-c3fb0cf9ec31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1866.263416] env[68040]: DEBUG oslo_concurrency.lockutils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Releasing lock "refresh_cache-8a1a6866-1439-4f82-9fda-a7d9a7f211a3" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1866.263712] env[68040]: DEBUG nova.compute.manager [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Instance network_info: |[{"id": "0af3579d-ccd2-4b83-a4df-c3fb0cf9ec31", "address": "fa:16:3e:a3:d6:8b", "network": {"id": "8ab8aca2-f199-4056-a7a7-9df3bd781a4a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-576267632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba36e75b6181468a80999043bb27346c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a4d142-3f97-47fe-b074-58923c46815e", "external-id": "nsx-vlan-transportzone-565", "segmentation_id": 565, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0af3579d-cc", "ovs_interfaceid": "0af3579d-ccd2-4b83-a4df-c3fb0cf9ec31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
[ 1866.264166] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:d6:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49a4d142-3f97-47fe-b074-58923c46815e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0af3579d-ccd2-4b83-a4df-c3fb0cf9ec31', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 1866.271812] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Creating folder: Project (ba36e75b6181468a80999043bb27346c). Parent ref: group-v639956. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1866.272699] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c74a93b3-7cd4-402a-b250-e03a4d8e8ba6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1866.284193] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Created folder: Project (ba36e75b6181468a80999043bb27346c) in parent group-v639956.
[ 1866.284380] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Creating folder: Instances. Parent ref: group-v640055. {{(pid=68040) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}}
[ 1866.284597] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75cfe2ac-8621-4a59-98c0-492182f5a5de {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1866.293170] env[68040]: INFO nova.virt.vmwareapi.vm_util [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Created folder: Instances in parent group-v640055.
[ 1866.293405] env[68040]: DEBUG oslo.service.loopingcall [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
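Note on the instance_info_cache payload above: it is a JSON list with one entry per VIF, nesting the network, its subnets, and per-subnet fixed IPs. A quick way to pull the useful bits out of such an entry, shown against a literal trimmed down to just the fields used (the full record is the one logged above):

    # Summarize one cached VIF entry; `raw` is trimmed from the logged JSON.
    import json

    raw = '''[{"id": "0af3579d-ccd2-4b83-a4df-c3fb0cf9ec31",
               "network": {"subnets": [{"ips": [{"address": "192.168.128.6"}]}],
                           "meta": {"mtu": 8950}},
               "details": {"segmentation_id": 565}}]'''

    for vif in json.loads(raw):
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        print(vif["id"], ips, vif["network"]["meta"]["mtu"],
              vif["details"]["segmentation_id"])
    # -> 0af3579d-ccd2-4b83-a4df-c3fb0cf9ec31 ['192.168.128.6'] 8950 565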
[ 1866.293581] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 1866.293770] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-89c4bfcb-8734-43e1-944a-0ded08327aeb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1866.313045] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 1866.313045] env[68040]: value = "task-3200328"
[ 1866.313045] env[68040]: _type = "Task"
[ 1866.313045] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1866.320337] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200328, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1866.822838] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200328, 'name': CreateVM_Task, 'duration_secs': 0.297259} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1866.823036] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 1866.823746] env[68040]: DEBUG oslo_concurrency.lockutils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1866.823913] env[68040]: DEBUG oslo_concurrency.lockutils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1866.824282] env[68040]: DEBUG oslo_concurrency.lockutils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 1866.824521] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3247612c-ef3c-4e29-a291-07c659168e7c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1866.828940] env[68040]: DEBUG oslo_vmware.api [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Waiting for the task: (returnval){
[ 1866.828940] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]523054d4-9e9f-d402-0b23-22ad81d096c3"
[ 1866.828940] env[68040]: _type = "Task"
[ 1866.828940] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1866.838542] env[68040]: DEBUG oslo_vmware.api [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]523054d4-9e9f-d402-0b23-22ad81d096c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1867.338781] env[68040]: DEBUG oslo_concurrency.lockutils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1867.339099] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 1867.339275] env[68040]: DEBUG oslo_concurrency.lockutils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1868.023987] env[68040]: DEBUG nova.compute.manager [req-9b051ea3-a4c9-4120-bf0c-b44255945022 req-91039b1e-7f85-483b-a8eb-80de92889f7d service nova] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Received event network-changed-0af3579d-ccd2-4b83-a4df-c3fb0cf9ec31 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 1868.024243] env[68040]: DEBUG nova.compute.manager [req-9b051ea3-a4c9-4120-bf0c-b44255945022 req-91039b1e-7f85-483b-a8eb-80de92889f7d service nova] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Refreshing instance network info cache due to event network-changed-0af3579d-ccd2-4b83-a4df-c3fb0cf9ec31. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}}
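Note on the "Waiting for the task ... progress is 0%." blocks above: oslo.vmware submits a VI SDK task (CreateVM_Task, SearchDatastore_Task, ...) and then polls its state until it succeeds or errors, logging each poll. A generic polling loop in that spirit, where get_task_info is a hypothetical accessor standing in for the library's internal property reads:

    # Generic task-polling sketch; get_task_info() is hypothetical, not the
    # real oslo.vmware call, but the success/error/sleep shape is the same.
    import time

    def wait_for_task(get_task_info, interval=0.5):
        while True:
            info = get_task_info()   # object with .state, .error, .result
            if info.state == "success":
                return info.result
            if info.state == "error":
                raise RuntimeError(info.error)
            time.sleep(interval)     # each pass produces a "progress is N%." line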
[ 1868.024481] env[68040]: DEBUG oslo_concurrency.lockutils [req-9b051ea3-a4c9-4120-bf0c-b44255945022 req-91039b1e-7f85-483b-a8eb-80de92889f7d service nova] Acquiring lock "refresh_cache-8a1a6866-1439-4f82-9fda-a7d9a7f211a3" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 1868.024628] env[68040]: DEBUG oslo_concurrency.lockutils [req-9b051ea3-a4c9-4120-bf0c-b44255945022 req-91039b1e-7f85-483b-a8eb-80de92889f7d service nova] Acquired lock "refresh_cache-8a1a6866-1439-4f82-9fda-a7d9a7f211a3" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1868.024814] env[68040]: DEBUG nova.network.neutron [req-9b051ea3-a4c9-4120-bf0c-b44255945022 req-91039b1e-7f85-483b-a8eb-80de92889f7d service nova] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Refreshing network info cache for port 0af3579d-ccd2-4b83-a4df-c3fb0cf9ec31 {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 1868.305023] env[68040]: DEBUG nova.network.neutron [req-9b051ea3-a4c9-4120-bf0c-b44255945022 req-91039b1e-7f85-483b-a8eb-80de92889f7d service nova] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Updated VIF entry in instance network info cache for port 0af3579d-ccd2-4b83-a4df-c3fb0cf9ec31. {{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
[ 1868.305450] env[68040]: DEBUG nova.network.neutron [req-9b051ea3-a4c9-4120-bf0c-b44255945022 req-91039b1e-7f85-483b-a8eb-80de92889f7d service nova] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Updating instance_info_cache with network_info: [{"id": "0af3579d-ccd2-4b83-a4df-c3fb0cf9ec31", "address": "fa:16:3e:a3:d6:8b", "network": {"id": "8ab8aca2-f199-4056-a7a7-9df3bd781a4a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-576267632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba36e75b6181468a80999043bb27346c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a4d142-3f97-47fe-b074-58923c46815e", "external-id": "nsx-vlan-transportzone-565", "segmentation_id": 565, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0af3579d-cc", "ovs_interfaceid": "0af3579d-ccd2-4b83-a4df-c3fb0cf9ec31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1868.315115] env[68040]: DEBUG oslo_concurrency.lockutils [req-9b051ea3-a4c9-4120-bf0c-b44255945022 req-91039b1e-7f85-483b-a8eb-80de92889f7d service nova] Releasing lock "refresh_cache-8a1a6866-1439-4f82-9fda-a7d9a7f211a3" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1870.025468] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1874.986315] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1877.985417] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1877.985844] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}}
[ 1877.985844] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}}
[ 1878.008951] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1878.009144] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1878.009240] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1878.009367] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1878.009496] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1878.009619] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1878.009742] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
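Note on the "Running periodic task ComputeManager._*" lines: they come from oslo.service's periodic-task machinery, which scans a manager class for decorated methods and runs each on its own interval. A minimal sketch of how such lines are produced; the manager class and task body are illustrative, not Nova's real ComputeManager:

    # Minimal oslo.service periodic-task sketch (illustrative manager/task).
    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _heal_instance_info_cache(self, context):
            # Nova's version walks instances and skips any still Building,
            # as the "Skipping network cache update" records above show.
            pass

    mgr = Manager()
    mgr.run_periodic_tasks(None)  # one tick; a service loops this on a timer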
[ 1878.009861] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1878.009979] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1878.010110] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 1878.010233] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}
[ 1878.983744] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1878.996224] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1878.996573] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1878.996619] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1878.996754] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1878.997882] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671def3f-46fc-4308-aa78-977df33ec059 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1879.006508] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e407d27-a3e2-4d98-bf24-5cb56c4a7d66 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1879.020547] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11150bb9-4353-4484-8e06-6ff69d675150 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1879.026888] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530db7ad-ac80-44e5-a33a-f5c71f48c8d3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1879.056308] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180977MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1879.056447] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1879.056643] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1879.128346] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 268b5613-b132-49ed-a45b-bc88132177cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1879.128527] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 4dfa01f8-53a0-4ee4-9b00-93017144ea0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1879.128644] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1879.128770] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1879.128892] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 221a5bbe-7168-4f5c-ab49-8a149545655f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1879.129024] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 031481de-d52f-4f3f-80e5-0d0d6803d624 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1879.129157] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 2e44ead1-4676-4d9b-bbae-5082f505fc8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1879.129277] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0210d9d4-2161-4b06-bc81-9de361accca6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1879.129395] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8f9a6934-9ded-4561-8d83-aacd4d79f29a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1879.129512] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8a1a6866-1439-4f82-9fda-a7d9a7f211a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 1879.129709] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1879.129846] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1879.246142] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb44f73-360c-40bf-a751-dd64e4bda3fa {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1879.254093] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d45ef2-eb07-4f6b-8d19-b91346f0b92f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1879.283623] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3d05b0-ac50-4339-8387-2fb9c4b2bc1e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1879.290887] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-890040a8-50ae-4053-813b-369779a42fe4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1879.304227] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1879.312755] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1879.327784] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1879.327977] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.271s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1880.328419] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
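Note on the inventory data above: the capacity Placement can schedule against per resource class is (total - reserved) * allocation_ratio, capped per-request by max_unit. Plugging in the values from this record:

    # Capacity math for the inventory logged above (values straight from the log).
    inv = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, v in inv.items():
        print(rc, (v['total'] - v['reserved']) * v['allocation_ratio'])
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0 -- the 10 allocated
    # vcpus reported above are well under the 192-vcpu schedulable ceiling.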
[ 1880.328802] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1880.984068] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1882.983689] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1882.984117] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
[ 1883.275634] env[68040]: WARNING oslo_vmware.rw_handles [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response
[ 1883.275634] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last):
[ 1883.275634] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close
[ 1883.275634] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse()
[ 1883.275634] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse
[ 1883.275634] env[68040]: ERROR oslo_vmware.rw_handles response.begin()
[ 1883.275634] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin
[ 1883.275634] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status()
[ 1883.275634] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status
[ 1883.275634] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without"
[ 1883.275634] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response
[ 1883.275634] env[68040]: ERROR oslo_vmware.rw_handles
[ 1883.276236] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/4343e44a-385d-4048-9c1c-4919b9f0dd67/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}}
[ 1883.278409] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}}
[ 1883.278706] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Copying Virtual Disk [datastore2] vmware_temp/4343e44a-385d-4048-9c1c-4919b9f0dd67/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/4343e44a-385d-4048-9c1c-4919b9f0dd67/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}}
[ 1883.279041] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-78535fc1-1001-459f-915f-6ac3e363a1ac {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1883.288206] env[68040]: DEBUG oslo_vmware.api [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Waiting for the task: (returnval){
[ 1883.288206] env[68040]: value = "task-3200329"
[ 1883.288206] env[68040]: _type = "Task"
[ 1883.288206] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1883.295946] env[68040]: DEBUG oslo_vmware.api [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Task: {'id': task-3200329, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1883.798598] env[68040]: DEBUG oslo_vmware.exceptions [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}}
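Note on the RemoteDisconnected WARNING above: it fires when the write handle is being closed and the ESX host has already dropped the HTTP connection, so getresponse() has nothing to read; the upload data itself was already flushed. A hedged sketch of tolerating that condition at close time, in the spirit the WARNING suggests (handle_close is a hypothetical helper, not the library's API):

    # Treat RemoteDisconnected on close() as non-fatal (hypothetical helper).
    import http.client

    def handle_close(conn):
        try:
            conn.getresponse()  # may raise if the server already hung up
        except http.client.RemoteDisconnected as exc:
            # Body was already sent; log and continue rather than failing.
            print(f"WARNING: error reading HTTP response: {exc}")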
[ 1883.798885] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 1883.799501] env[68040]: ERROR nova.compute.manager [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1883.799501] env[68040]: Faults: ['InvalidArgument']
[ 1883.799501] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Traceback (most recent call last):
[ 1883.799501] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources
[ 1883.799501] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] yield resources
[ 1883.799501] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1883.799501] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] self.driver.spawn(context, instance, image_meta,
[ 1883.799501] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1883.799501] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1883.799501] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1883.799501] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] self._fetch_image_if_missing(context, vi)
[ 1883.799501] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1883.799880] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] image_cache(vi, tmp_image_ds_loc)
[ 1883.799880] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1883.799880] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] vm_util.copy_virtual_disk(
[ 1883.799880] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1883.799880] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] session._wait_for_task(vmdk_copy_task)
[ 1883.799880] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1883.799880] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] return self.wait_for_task(task_ref)
[ 1883.799880] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1883.799880] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] return evt.wait()
[ 1883.799880] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1883.799880] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] result = hub.switch()
[ 1883.799880] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1883.800350] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] return self.greenlet.switch()
[ 1883.800350] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1883.800350] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] self.f(*self.args, **self.kw)
[ 1883.800350] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1883.800350] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] raise exceptions.translate_fault(task_info.error)
[ 1883.800350] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1883.800350] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Faults: ['InvalidArgument']
[ 1883.800350] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf]
[ 1883.800350] env[68040]: INFO nova.compute.manager [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Terminating instance
[ 1883.801360] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 1883.801565] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1883.801805] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ce78915-e4ac-46a6-b6df-191ad9e6deca {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1883.803950] env[68040]: DEBUG nova.compute.manager [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 1883.804164] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 1883.804873] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a10ed9b9-bfcb-4409-b148-a7feeeccd57c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1883.811573] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}}
[ 1883.811818] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17258d4e-187d-472f-b04e-1237f16bbd85 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1883.814009] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1883.814197] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}}
[ 1883.815206] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62dcfba0-3c04-465a-a610-c9c7d964a094 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1883.819838] env[68040]: DEBUG oslo_vmware.api [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Waiting for the task: (returnval){
[ 1883.819838] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52a12b2a-7bd3-0152-f386-5dde6465c730"
[ 1883.819838] env[68040]: _type = "Task"
[ 1883.819838] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1883.829716] env[68040]: DEBUG oslo_vmware.api [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52a12b2a-7bd3-0152-f386-5dde6465c730, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1883.876329] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}}
[ 1883.876569] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}}
[ 1883.876713] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Deleting the datastore file [datastore2] 268b5613-b132-49ed-a45b-bc88132177cf {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1883.876976] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e58268df-b49f-4815-b193-b3ac84b9921a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1883.883065] env[68040]: DEBUG oslo_vmware.api [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Waiting for the task: (returnval){
[ 1883.883065] env[68040]: value = "task-3200331"
[ 1883.883065] env[68040]: _type = "Task"
[ 1883.883065] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1883.890516] env[68040]: DEBUG oslo_vmware.api [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Task: {'id': task-3200331, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1883.979273] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1884.331070] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}}
[ 1884.331070] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Creating directory with path [datastore2] vmware_temp/83237a80-b46d-406f-8acd-5555549e9aad/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}}
[ 1884.331070] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a4b36333-9c2c-40ef-930b-083b5eb6288c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1884.346486] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Created directory with path [datastore2] vmware_temp/83237a80-b46d-406f-8acd-5555549e9aad/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}}
[ 1884.346720] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Fetch image to [datastore2] vmware_temp/83237a80-b46d-406f-8acd-5555549e9aad/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}}
[ 1884.346864] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/83237a80-b46d-406f-8acd-5555549e9aad/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}}
[ 1884.347642] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db1fdc4-356a-44ed-b0e3-fb0ca3536672 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1884.354256] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f9a1ea-6140-49f1-94a5-6288a60a9a3b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1884.363234] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f598cd7-fcb4-4a7c-8c58-aea39aa35804 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1884.396799] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe05132e-08e2-4f0a-b8df-606dbe934539 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1884.406388] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2b33c410-8e96-466c-afa9-05222a87381f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1884.407999] env[68040]: DEBUG oslo_vmware.api [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Task: {'id': task-3200331, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064875} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1884.408254] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1884.408439] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}}
[ 1884.408610] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 1884.408781] env[68040]: INFO nova.compute.manager [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Took 0.60 seconds to destroy the instance on the hypervisor.
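Note on "Fault InvalidArgument not matched." followed by the spawn failure: oslo.vmware's get_fault_class found no specific exception class for the InvalidArgument fault, so the failed CopyVirtualDisk_Task surfaced as a generic VimFaultException carrying Faults: ['InvalidArgument'], which is what killed the spawn above. A hedged sketch of catching that translated fault; `session` and `task` stand in for the objects from the surrounding flow, and the teardown comment mirrors what the log shows Nova doing next:

    # Catching the translated VIM fault seen above (sketch, not Nova's code).
    from oslo_vmware import exceptions as vexc

    def copy_disk_checked(session, task):
        try:
            return session.wait_for_task(task)
        except vexc.VimFaultException as e:
            if 'InvalidArgument' in (e.fault_list or []):
                # e.g. the "fileType was not correct" case: the caller should
                # destroy the half-built VM and abort its resource claim.
                print("CopyVirtualDisk failed with InvalidArgument")
            raise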
[ 1884.410850] env[68040]: DEBUG nova.compute.claims [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1884.411052] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1884.411289] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1884.429616] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1884.593893] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a936d9-ead1-4f84-bed7-08902b7fc790 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.601798] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0767a0-757f-4273-a157-392d1c8414cc {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.605450] env[68040]: DEBUG oslo_vmware.rw_handles [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/83237a80-b46d-406f-8acd-5555549e9aad/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1884.686511] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc489b3e-8343-49af-aea8-400da67c57fc {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.689495] env[68040]: DEBUG oslo_vmware.rw_handles [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Completed reading data from the image iterator. 
{{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1884.689665] env[68040]: DEBUG oslo_vmware.rw_handles [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/83237a80-b46d-406f-8acd-5555549e9aad/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1884.694913] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f98a3f37-a8d8-4b4a-8cab-831f6e5dff34 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.707874] env[68040]: DEBUG nova.compute.provider_tree [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1884.716654] env[68040]: DEBUG nova.scheduler.client.report [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1884.730103] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.319s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.730631] env[68040]: ERROR nova.compute.manager [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1884.730631] env[68040]: Faults: ['InvalidArgument'] [ 1884.730631] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Traceback (most recent call last): [ 1884.730631] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1884.730631] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] self.driver.spawn(context, instance, image_meta, [ 1884.730631] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File 
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1884.730631] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1884.730631] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1884.730631] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] self._fetch_image_if_missing(context, vi) [ 1884.730631] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1884.730631] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] image_cache(vi, tmp_image_ds_loc) [ 1884.730631] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1884.731085] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] vm_util.copy_virtual_disk( [ 1884.731085] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1884.731085] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] session._wait_for_task(vmdk_copy_task) [ 1884.731085] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1884.731085] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] return self.wait_for_task(task_ref) [ 1884.731085] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1884.731085] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] return evt.wait() [ 1884.731085] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1884.731085] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] result = hub.switch() [ 1884.731085] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1884.731085] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] return self.greenlet.switch() [ 1884.731085] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1884.731085] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] self.f(*self.args, **self.kw) [ 1884.731449] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1884.731449] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] raise exceptions.translate_fault(task_info.error) [ 
1884.731449] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1884.731449] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Faults: ['InvalidArgument'] [ 1884.731449] env[68040]: ERROR nova.compute.manager [instance: 268b5613-b132-49ed-a45b-bc88132177cf] [ 1884.731449] env[68040]: DEBUG nova.compute.utils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1884.732723] env[68040]: DEBUG nova.compute.manager [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Build of instance 268b5613-b132-49ed-a45b-bc88132177cf was re-scheduled: A specified parameter was not correct: fileType [ 1884.732723] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1884.733116] env[68040]: DEBUG nova.compute.manager [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1884.733316] env[68040]: DEBUG nova.compute.manager [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1884.733495] env[68040]: DEBUG nova.compute.manager [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1884.733663] env[68040]: DEBUG nova.network.neutron [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1885.054037] env[68040]: DEBUG nova.network.neutron [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1885.067335] env[68040]: INFO nova.compute.manager [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Took 0.33 seconds to deallocate network for instance. [ 1885.174297] env[68040]: INFO nova.scheduler.client.report [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Deleted allocations for instance 268b5613-b132-49ed-a45b-bc88132177cf [ 1885.194316] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e77b49ca-0350-436e-baa9-224d91da213f tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Lock "268b5613-b132-49ed-a45b-bc88132177cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 605.826s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.194599] env[68040]: DEBUG oslo_concurrency.lockutils [None req-eef1839f-ce2d-4fc0-8bf3-d02dc3f75e83 tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Lock "268b5613-b132-49ed-a45b-bc88132177cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 211.578s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1885.194831] env[68040]: DEBUG oslo_concurrency.lockutils [None req-eef1839f-ce2d-4fc0-8bf3-d02dc3f75e83 tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquiring lock "268b5613-b132-49ed-a45b-bc88132177cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1885.195053] env[68040]: DEBUG oslo_concurrency.lockutils [None req-eef1839f-ce2d-4fc0-8bf3-d02dc3f75e83 tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Lock
"268b5613-b132-49ed-a45b-bc88132177cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1885.195233] env[68040]: DEBUG oslo_concurrency.lockutils [None req-eef1839f-ce2d-4fc0-8bf3-d02dc3f75e83 tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Lock "268b5613-b132-49ed-a45b-bc88132177cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.197100] env[68040]: INFO nova.compute.manager [None req-eef1839f-ce2d-4fc0-8bf3-d02dc3f75e83 tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Terminating instance [ 1885.198870] env[68040]: DEBUG nova.compute.manager [None req-eef1839f-ce2d-4fc0-8bf3-d02dc3f75e83 tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1885.199090] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-eef1839f-ce2d-4fc0-8bf3-d02dc3f75e83 tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1885.199551] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-85cfd388-0b1f-4b13-ae2d-11e840d548cf {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.208933] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d67dc3c-7e85-4939-bbb2-beb2b8d76ee1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.237302] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-eef1839f-ce2d-4fc0-8bf3-d02dc3f75e83 tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 268b5613-b132-49ed-a45b-bc88132177cf could not be found. [ 1885.237502] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-eef1839f-ce2d-4fc0-8bf3-d02dc3f75e83 tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1885.237684] env[68040]: INFO nova.compute.manager [None req-eef1839f-ce2d-4fc0-8bf3-d02dc3f75e83 tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1885.237929] env[68040]: DEBUG oslo.service.loopingcall [None req-eef1839f-ce2d-4fc0-8bf3-d02dc3f75e83 tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1885.238169] env[68040]: DEBUG nova.compute.manager [-] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1885.238268] env[68040]: DEBUG nova.network.neutron [-] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1885.274492] env[68040]: DEBUG nova.network.neutron [-] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1885.282622] env[68040]: INFO nova.compute.manager [-] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] Took 0.04 seconds to deallocate network for instance. [ 1885.361822] env[68040]: DEBUG oslo_concurrency.lockutils [None req-eef1839f-ce2d-4fc0-8bf3-d02dc3f75e83 tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Lock "268b5613-b132-49ed-a45b-bc88132177cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.167s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.362637] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "268b5613-b132-49ed-a45b-bc88132177cf" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 50.174s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1885.362819] env[68040]: INFO nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 268b5613-b132-49ed-a45b-bc88132177cf] During sync_power_state the instance has a pending task (deleting). Skip.
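The "pending task (deleting). Skip." record reflects the guard inside _sync_power_states: the periodic pass waited 50.174s on the per-instance lock (terminate was holding it), and once it finally runs it declines to reconcile power state while another operation still owns the instance. A rough sketch of that guard, assuming a db_instance object with task_state/power_state attributes and a save() method (the real nested function also takes a context and driver state; this is only the skip logic):

    def query_driver_power_state_and_sync(db_instance, driver_power_state):
        # An operation (here: deleting) is mid-flight; syncing now could
        # race it, so the periodic task skips this instance entirely.
        if db_instance.task_state is not None:
            return "skipped"
        # Otherwise reconcile the DB view with what the driver reports.
        if db_instance.power_state != driver_power_state:
            db_instance.power_state = driver_power_state
            db_instance.save()
        return "synced"
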
[ 1885.362996] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "268b5613-b132-49ed-a45b-bc88132177cf" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1888.642064] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "92b0f3c0-2c87-478d-8b11-f0b05aee12ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.642064] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "92b0f3c0-2c87-478d-8b11-f0b05aee12ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.654568] env[68040]: DEBUG nova.compute.manager [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1888.710027] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.710291] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.711800] env[68040]: INFO nova.compute.claims [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1888.924582] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36458393-5e5c-4e18-915a-1fb0f68b4152 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.933866] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9574a1-050e-408a-8718-3166472add55 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.963923] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d03da6e-e368-4687-b556-e1e8d3bdf020 {{(pid=68040) request_handler
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.971650] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfca1242-1b20-4c0b-827d-ccf2f7306d5f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.985382] env[68040]: DEBUG nova.compute.provider_tree [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1888.996542] env[68040]: DEBUG nova.scheduler.client.report [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1889.013876] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.303s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.014351] env[68040]: DEBUG nova.compute.manager [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1889.050840] env[68040]: DEBUG nova.compute.utils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1889.052712] env[68040]: DEBUG nova.compute.manager [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1889.052712] env[68040]: DEBUG nova.network.neutron [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1889.064010] env[68040]: DEBUG nova.compute.manager [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Start building block device mappings for instance. 
{{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1889.139585] env[68040]: DEBUG nova.compute.manager [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Start spawning the instance on the hypervisor. {{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1889.168559] env[68040]: DEBUG nova.virt.hardware [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1889.168798] env[68040]: DEBUG nova.virt.hardware [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1889.168960] env[68040]: DEBUG nova.virt.hardware [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1889.169161] env[68040]: DEBUG nova.virt.hardware [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1889.169313] env[68040]: DEBUG nova.virt.hardware [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1889.169463] env[68040]: DEBUG nova.virt.hardware [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1889.169670] env[68040]: DEBUG nova.virt.hardware [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1889.169831] env[68040]: DEBUG nova.virt.hardware [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 
tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1889.170011] env[68040]: DEBUG nova.virt.hardware [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1889.170197] env[68040]: DEBUG nova.virt.hardware [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1889.170377] env[68040]: DEBUG nova.virt.hardware [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1889.171277] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4c2abc-d243-4a39-951f-715a96d8d3e6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.179831] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a16742f0-e825-4604-b7e8-b72b3b3629fd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.199456] env[68040]: DEBUG nova.policy [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c8e66b0d9ada4cabbb8efd2e8340a3a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52dbf578e94a4db7af130703ad4eb741', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 1889.589875] env[68040]: DEBUG nova.network.neutron [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Successfully created port: bb96e0d1-1a10-4e40-ba5a-14526502c64b {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1890.230405] env[68040]: DEBUG nova.compute.manager [req-31f08a40-554d-413a-8501-04d80db777d7 req-219447b8-2886-4970-b5fc-6bccaf2570b3 service nova] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Received event network-vif-plugged-bb96e0d1-1a10-4e40-ba5a-14526502c64b {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1890.230405] env[68040]: DEBUG oslo_concurrency.lockutils [req-31f08a40-554d-413a-8501-04d80db777d7 req-219447b8-2886-4970-b5fc-6bccaf2570b3 service nova] Acquiring lock "92b0f3c0-2c87-478d-8b11-f0b05aee12ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68040) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.230405] env[68040]: DEBUG oslo_concurrency.lockutils [req-31f08a40-554d-413a-8501-04d80db777d7 req-219447b8-2886-4970-b5fc-6bccaf2570b3 service nova] Lock "92b0f3c0-2c87-478d-8b11-f0b05aee12ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.230405] env[68040]: DEBUG oslo_concurrency.lockutils [req-31f08a40-554d-413a-8501-04d80db777d7 req-219447b8-2886-4970-b5fc-6bccaf2570b3 service nova] Lock "92b0f3c0-2c87-478d-8b11-f0b05aee12ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.230836] env[68040]: DEBUG nova.compute.manager [req-31f08a40-554d-413a-8501-04d80db777d7 req-219447b8-2886-4970-b5fc-6bccaf2570b3 service nova] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] No waiting events found dispatching network-vif-plugged-bb96e0d1-1a10-4e40-ba5a-14526502c64b {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1890.230836] env[68040]: WARNING nova.compute.manager [req-31f08a40-554d-413a-8501-04d80db777d7 req-219447b8-2886-4970-b5fc-6bccaf2570b3 service nova] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Received unexpected event network-vif-plugged-bb96e0d1-1a10-4e40-ba5a-14526502c64b for instance with vm_state building and task_state spawning. [ 1890.605543] env[68040]: DEBUG nova.network.neutron [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Successfully updated port: bb96e0d1-1a10-4e40-ba5a-14526502c64b {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1890.618959] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "refresh_cache-92b0f3c0-2c87-478d-8b11-f0b05aee12ed" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1890.621167] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquired lock "refresh_cache-92b0f3c0-2c87-478d-8b11-f0b05aee12ed" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1890.621167] env[68040]: DEBUG nova.network.neutron [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1890.684649] env[68040]: DEBUG nova.network.neutron [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Instance cache missing network info.
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1890.843306] env[68040]: DEBUG nova.network.neutron [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Updating instance_info_cache with network_info: [{"id": "bb96e0d1-1a10-4e40-ba5a-14526502c64b", "address": "fa:16:3e:43:2b:26", "network": {"id": "9839fe48-68c5-4649-bd83-6b4d9c6008e8", "bridge": "br-int", "label": "tempest-ServersTestJSON-1965746643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dbf578e94a4db7af130703ad4eb741", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb96e0d1-1a", "ovs_interfaceid": "bb96e0d1-1a10-4e40-ba5a-14526502c64b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1890.855725] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Releasing lock "refresh_cache-92b0f3c0-2c87-478d-8b11-f0b05aee12ed" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1890.856015] env[68040]: DEBUG nova.compute.manager [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Instance network_info: |[{"id": "bb96e0d1-1a10-4e40-ba5a-14526502c64b", "address": "fa:16:3e:43:2b:26", "network": {"id": "9839fe48-68c5-4649-bd83-6b4d9c6008e8", "bridge": "br-int", "label": "tempest-ServersTestJSON-1965746643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dbf578e94a4db7af130703ad4eb741", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb96e0d1-1a", "ovs_interfaceid": "bb96e0d1-1a10-4e40-ba5a-14526502c64b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1890.856422] env[68040]: 
DEBUG nova.virt.vmwareapi.vmops [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:2b:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4b033f4d-2e92-4702-add6-410a29d3f251', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bb96e0d1-1a10-4e40-ba5a-14526502c64b', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1890.863777] env[68040]: DEBUG oslo.service.loopingcall [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1890.864234] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1890.864464] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e33ec17c-a9c5-4ee4-a5f2-1d1b8a0357ca {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.884469] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1890.884469] env[68040]: value = "task-3200332" [ 1890.884469] env[68040]: _type = "Task" [ 1890.884469] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.892121] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200332, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.394734] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200332, 'name': CreateVM_Task, 'duration_secs': 0.282735} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.395131] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1891.395577] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1891.395876] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1891.396269] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1891.396515] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14841506-d9bb-4e0c-b849-2993c1328f71 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.400768] env[68040]: DEBUG oslo_vmware.api [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for the task: (returnval){ [ 1891.400768] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52f854ca-63b4-de98-9ebe-6aa6f2435194" [ 1891.400768] env[68040]: _type = "Task" [ 1891.400768] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.408125] env[68040]: DEBUG oslo_vmware.api [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52f854ca-63b4-de98-9ebe-6aa6f2435194, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.911475] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1891.911689] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1891.911905] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1892.254613] env[68040]: DEBUG nova.compute.manager [req-c69c8a6b-035c-4fd0-9d7d-02640e901727 req-1d31d577-ab83-4fc4-9e90-c5581341c799 service nova] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Received event network-changed-bb96e0d1-1a10-4e40-ba5a-14526502c64b {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1892.254781] env[68040]: DEBUG nova.compute.manager [req-c69c8a6b-035c-4fd0-9d7d-02640e901727 req-1d31d577-ab83-4fc4-9e90-c5581341c799 service nova] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Refreshing instance network info cache due to event network-changed-bb96e0d1-1a10-4e40-ba5a-14526502c64b. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1892.255013] env[68040]: DEBUG oslo_concurrency.lockutils [req-c69c8a6b-035c-4fd0-9d7d-02640e901727 req-1d31d577-ab83-4fc4-9e90-c5581341c799 service nova] Acquiring lock "refresh_cache-92b0f3c0-2c87-478d-8b11-f0b05aee12ed" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1892.255169] env[68040]: DEBUG oslo_concurrency.lockutils [req-c69c8a6b-035c-4fd0-9d7d-02640e901727 req-1d31d577-ab83-4fc4-9e90-c5581341c799 service nova] Acquired lock "refresh_cache-92b0f3c0-2c87-478d-8b11-f0b05aee12ed" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1892.255329] env[68040]: DEBUG nova.network.neutron [req-c69c8a6b-035c-4fd0-9d7d-02640e901727 req-1d31d577-ab83-4fc4-9e90-c5581341c799 service nova] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Refreshing network info cache for port bb96e0d1-1a10-4e40-ba5a-14526502c64b {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1892.492649] env[68040]: DEBUG nova.network.neutron [req-c69c8a6b-035c-4fd0-9d7d-02640e901727 req-1d31d577-ab83-4fc4-9e90-c5581341c799 service nova] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Updated VIF entry in instance network info cache for port bb96e0d1-1a10-4e40-ba5a-14526502c64b. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1892.493024] env[68040]: DEBUG nova.network.neutron [req-c69c8a6b-035c-4fd0-9d7d-02640e901727 req-1d31d577-ab83-4fc4-9e90-c5581341c799 service nova] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Updating instance_info_cache with network_info: [{"id": "bb96e0d1-1a10-4e40-ba5a-14526502c64b", "address": "fa:16:3e:43:2b:26", "network": {"id": "9839fe48-68c5-4649-bd83-6b4d9c6008e8", "bridge": "br-int", "label": "tempest-ServersTestJSON-1965746643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dbf578e94a4db7af130703ad4eb741", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb96e0d1-1a", "ovs_interfaceid": "bb96e0d1-1a10-4e40-ba5a-14526502c64b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1892.502490] env[68040]: DEBUG oslo_concurrency.lockutils [req-c69c8a6b-035c-4fd0-9d7d-02640e901727 req-1d31d577-ab83-4fc4-9e90-c5581341c799 service nova] Releasing lock "refresh_cache-92b0f3c0-2c87-478d-8b11-f0b05aee12ed" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1916.178107] env[68040]: DEBUG oslo_concurrency.lockutils [None req-103b1d88-38df-4185-8b30-0df4f574425b tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Acquiring lock "8f9a6934-9ded-4561-8d83-aacd4d79f29a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.984471] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1932.153063] env[68040]: WARNING oslo_vmware.rw_handles [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1932.153063] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1932.153063] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1932.153063] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1932.153063] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1932.153063] env[68040]: ERROR oslo_vmware.rw_handles
response.begin() [ 1932.153063] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1932.153063] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1932.153063] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1932.153063] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1932.153063] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1932.153063] env[68040]: ERROR oslo_vmware.rw_handles [ 1932.153063] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/83237a80-b46d-406f-8acd-5555549e9aad/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1932.155253] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1932.155597] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Copying Virtual Disk [datastore2] vmware_temp/83237a80-b46d-406f-8acd-5555549e9aad/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/83237a80-b46d-406f-8acd-5555549e9aad/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1932.155924] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f861da4-ac56-444d-9b57-6cc92cd3bdc8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.164121] env[68040]: DEBUG oslo_vmware.api [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Waiting for the task: (returnval){ [ 1932.164121] env[68040]: value = "task-3200333" [ 1932.164121] env[68040]: _type = "Task" [ 1932.164121] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.171772] env[68040]: DEBUG oslo_vmware.api [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Task: {'id': task-3200333, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.674119] env[68040]: DEBUG oslo_vmware.exceptions [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1932.674411] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1932.674975] env[68040]: ERROR nova.compute.manager [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1932.674975] env[68040]: Faults: ['InvalidArgument'] [ 1932.674975] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Traceback (most recent call last): [ 1932.674975] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1932.674975] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] yield resources [ 1932.674975] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1932.674975] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] self.driver.spawn(context, instance, image_meta, [ 1932.674975] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1932.674975] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1932.674975] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1932.674975] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] self._fetch_image_if_missing(context, vi) [ 1932.674975] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1932.675437] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] image_cache(vi, tmp_image_ds_loc) [ 1932.675437] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1932.675437] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] vm_util.copy_virtual_disk( [ 1932.675437] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1932.675437] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] session._wait_for_task(vmdk_copy_task) [ 1932.675437] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1932.675437] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] return self.wait_for_task(task_ref) [ 1932.675437] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1932.675437] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] return evt.wait() [ 1932.675437] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1932.675437] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] result = hub.switch() [ 1932.675437] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1932.675437] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] return self.greenlet.switch() [ 1932.675843] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1932.675843] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] self.f(*self.args, **self.kw) [ 1932.675843] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1932.675843] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] raise exceptions.translate_fault(task_info.error) [ 1932.675843] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1932.675843] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Faults: ['InvalidArgument'] [ 1932.675843] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] [ 1932.675843] env[68040]: INFO nova.compute.manager [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Terminating instance [ 1932.676856] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1932.677073] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 
tempest-ServerShowV247Test-556552756-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1932.677312] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c3321bd-969d-48de-a360-39005fc4c034 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.679385] env[68040]: DEBUG nova.compute.manager [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1932.679594] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1932.680312] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13ac3c2-8cb0-403c-8d8c-6100567ee30e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.687041] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1932.687248] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3cad2844-3115-4256-a6f3-6d5fc0982c6d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.689287] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1932.689461] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1932.690423] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9469071d-61f2-4753-94b9-4570ed7d995d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.694945] env[68040]: DEBUG oslo_vmware.api [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Waiting for the task: (returnval){ [ 1932.694945] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5231cc79-5234-9760-72a6-c9e09e21dc1a" [ 1932.694945] env[68040]: _type = "Task" [ 1932.694945] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.707636] env[68040]: DEBUG oslo_vmware.api [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5231cc79-5234-9760-72a6-c9e09e21dc1a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.762428] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1932.762655] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1932.762808] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Deleting the datastore file [datastore2] 4dfa01f8-53a0-4ee4-9b00-93017144ea0b {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1932.763109] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a501d13a-276c-45ae-8bc6-d172926d3d4a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.769285] env[68040]: DEBUG oslo_vmware.api [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Waiting for the task: (returnval){ [ 1932.769285] env[68040]: value = "task-3200335" [ 1932.769285] env[68040]: _type = "Task" [ 1932.769285] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.776745] env[68040]: DEBUG oslo_vmware.api [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Task: {'id': task-3200335, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.205668] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1933.205668] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Creating directory with path [datastore2] vmware_temp/6e71c152-4435-473f-bd81-88b765db7a24/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1933.206122] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d708fe4b-b955-47c9-9a8f-2102b04166d7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.217427] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Created directory with path [datastore2] vmware_temp/6e71c152-4435-473f-bd81-88b765db7a24/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1933.217626] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Fetch image to [datastore2] vmware_temp/6e71c152-4435-473f-bd81-88b765db7a24/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1933.217775] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/6e71c152-4435-473f-bd81-88b765db7a24/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1933.218534] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8256bcc3-6a83-4b94-ad52-f3688437fdf2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.224945] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e745a2e-5a97-45ed-b4fc-7b8a8000187c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.234863] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7055158e-f67c-43eb-a248-804176e3d26e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.265664] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd823330-a6b6-4b59-aee6-0e770647dee0 {{(pid=68040) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.273962] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-041da2d1-59af-43b5-928d-e89959e0ef98 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.283019] env[68040]: DEBUG oslo_vmware.api [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Task: {'id': task-3200335, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.062386} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.283019] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1933.283019] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1933.283019] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1933.283019] env[68040]: INFO nova.compute.manager [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Took 0.60 seconds to destroy the instance on the hypervisor. 
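
[editor's note] The InvalidArgument/fileType failure above is not raised by the SOAP call itself: CopyVirtualDisk_Task returns a task reference immediately, and it is oslo.vmware's task polling (the "progress is 0%" lines) that later translates the task's error state into a VimFaultException. A minimal sketch of that invoke-then-wait pattern follows; the endpoint, credentials, and datastore paths are placeholders, not values from this run.

    # Sketch only: the invoke-then-wait pattern used by the driver above.
    # Host, credentials and paths are placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import exceptions as vmware_exc

    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',   # placeholder endpoint/credentials
        api_retry_count=10, task_poll_interval=0.5)

    try:
        # CopyVirtualDisk_Task returns a task reference right away;
        # wait_for_task() polls it and raises a translated exception
        # when the task finishes in an error state.
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task',
            session.vim.service_content.virtualDiskManager,
            sourceName='[datastore2] vmware_temp/example/tmp-sparse.vmdk',
            destName='[datastore2] vmware_temp/example/example.vmdk')
        session.wait_for_task(task)
    except vmware_exc.VimFaultException as exc:
        # fault_list carries the raw vSphere fault names, e.g. ['InvalidArgument'].
        print('copy failed:', exc.fault_list, exc)
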
[ 1933.283401] env[68040]: DEBUG nova.compute.claims [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1933.283401] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1933.283501] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1933.293424] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1933.437636] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1933.438432] env[68040]: ERROR nova.compute.manager [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 8c308313-03d5-40b6-a5fe-9037e32dc76e. 
[ 1933.438432] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Traceback (most recent call last): [ 1933.438432] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1933.438432] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1933.438432] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1933.438432] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] result = getattr(controller, method)(*args, **kwargs) [ 1933.438432] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1933.438432] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return self._get(image_id) [ 1933.438432] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1933.438432] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1933.438432] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1933.439046] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] resp, body = self.http_client.get(url, headers=header) [ 1933.439046] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1933.439046] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return self.request(url, 'GET', **kwargs) [ 1933.439046] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1933.439046] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return self._handle_response(resp) [ 1933.439046] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1933.439046] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] raise exc.from_response(resp, resp.content) [ 1933.439046] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1933.439046] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] [ 1933.439046] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] During handling of the above exception, another exception occurred: [ 1933.439046] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] [ 1933.439046] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Traceback (most recent call last): [ 1933.439566] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1933.439566] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] yield resources [ 1933.439566] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1933.439566] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] self.driver.spawn(context, instance, image_meta, [ 1933.439566] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1933.439566] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1933.439566] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1933.439566] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] self._fetch_image_if_missing(context, vi) [ 1933.439566] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1933.439566] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] image_fetch(context, vi, tmp_image_ds_loc) [ 1933.439566] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1933.439566] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] images.fetch_image( [ 1933.439566] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1933.439935] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] metadata = IMAGE_API.get(context, image_ref) [ 1933.439935] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1933.439935] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return session.show(context, image_id, [ 1933.439935] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1933.439935] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] _reraise_translated_image_exception(image_id) [ 1933.439935] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1933.439935] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] raise new_exc.with_traceback(exc_trace) [ 1933.439935] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1933.439935] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1933.439935] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1933.439935] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] result = getattr(controller, method)(*args, **kwargs) [ 1933.439935] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1933.439935] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return self._get(image_id) [ 1933.440327] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1933.440327] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1933.440327] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1933.440327] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] resp, body = self.http_client.get(url, headers=header) [ 1933.440327] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1933.440327] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return self.request(url, 'GET', **kwargs) [ 1933.440327] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1933.440327] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return self._handle_response(resp) [ 1933.440327] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1933.440327] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] raise exc.from_response(resp, resp.content) [ 1933.440327] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] nova.exception.ImageNotAuthorized: Not authorized for image 8c308313-03d5-40b6-a5fe-9037e32dc76e. 
[ 1933.440327] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] [ 1933.440832] env[68040]: INFO nova.compute.manager [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Terminating instance [ 1933.440832] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1933.440832] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1933.441322] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Acquiring lock "refresh_cache-f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1933.441322] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Acquired lock "refresh_cache-f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1933.441322] env[68040]: DEBUG nova.network.neutron [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1933.442183] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f86da8c-9c41-4386-9f8b-b5f4dbf5840b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.452108] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1933.452334] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1933.453305] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af4731c3-a1e8-4a4b-af71-f5d0d2df62c3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.456620] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b88353ff-6c89-40f6-b4d8-27f07790f146 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.461507] env[68040]: DEBUG oslo_vmware.api [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Waiting for the task: (returnval){ [ 1933.461507] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]527d4029-24c1-fed5-8b66-f3b2294b9d52" [ 1933.461507] env[68040]: _type = "Task" [ 1933.461507] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.466545] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dfaee8f-f5be-410a-a26e-8c1d6954b136 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.469854] env[68040]: DEBUG nova.network.neutron [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1933.500307] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1933.500524] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Creating directory with path [datastore2] vmware_temp/54745197-08d6-4a9d-97bc-6128da801732/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1933.503115] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3bc2da73-778d-4ba9-a461-aa79399710fe {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.505311] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dffda4df-c696-49c7-bd1b-5bcc7e4bb58f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.512622] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dcc8f9f-84b4-46a5-b3c9-be59e3131bb9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.525401] env[68040]: DEBUG nova.compute.provider_tree [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 
tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1933.527388] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Created directory with path [datastore2] vmware_temp/54745197-08d6-4a9d-97bc-6128da801732/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1933.527580] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Fetch image to [datastore2] vmware_temp/54745197-08d6-4a9d-97bc-6128da801732/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1933.527752] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/54745197-08d6-4a9d-97bc-6128da801732/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1933.528650] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6912969-3da8-4839-a5c4-05cc13d1160b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.535475] env[68040]: DEBUG nova.scheduler.client.report [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1933.538706] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0e09aa-6891-4781-8bd2-803efcd28fbf {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.548398] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f443e7-a1c3-42e7-8e6a-ca8f42b7e45d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.553112] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.270s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1933.553651] env[68040]: ERROR nova.compute.manager [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1933.553651] env[68040]: Faults: ['InvalidArgument'] [ 1933.553651] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Traceback (most recent call last): [ 1933.553651] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1933.553651] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] self.driver.spawn(context, instance, image_meta, [ 1933.553651] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1933.553651] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1933.553651] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1933.553651] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] self._fetch_image_if_missing(context, vi) [ 1933.553651] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1933.553651] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] image_cache(vi, tmp_image_ds_loc) [ 1933.553651] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1933.554092] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] vm_util.copy_virtual_disk( [ 1933.554092] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1933.554092] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] session._wait_for_task(vmdk_copy_task) [ 1933.554092] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1933.554092] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] return self.wait_for_task(task_ref) [ 1933.554092] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1933.554092] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] return evt.wait() [ 1933.554092] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1933.554092] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] result = hub.switch() [ 1933.554092] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1933.554092] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] return self.greenlet.switch() [ 1933.554092] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1933.554092] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] self.f(*self.args, **self.kw) [ 1933.554504] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1933.554504] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] raise exceptions.translate_fault(task_info.error) [ 1933.554504] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1933.554504] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Faults: ['InvalidArgument'] [ 1933.554504] env[68040]: ERROR nova.compute.manager [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] [ 1933.554504] env[68040]: DEBUG nova.compute.utils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1933.556098] env[68040]: DEBUG nova.compute.manager [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Build of instance 4dfa01f8-53a0-4ee4-9b00-93017144ea0b was re-scheduled: A specified parameter was not correct: fileType [ 1933.556098] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1933.556464] env[68040]: DEBUG nova.compute.manager [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1933.556635] env[68040]: DEBUG nova.compute.manager [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1933.556807] env[68040]: DEBUG nova.compute.manager [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1933.556969] env[68040]: DEBUG nova.network.neutron [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1933.587020] env[68040]: DEBUG nova.network.neutron [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1933.587020] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52c57828-c186-4cfb-961c-c16c8920abdd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.592168] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5f3c6e12-95fe-4770-810e-b768bff34dd2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.595227] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Releasing lock "refresh_cache-f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1933.595227] env[68040]: DEBUG nova.compute.manager [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Start destroying the instance on the hypervisor. 
{{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1933.595227] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1933.596754] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809742af-d5f5-4bfd-974e-010697585e22 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.603098] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1933.603559] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ef5abd6-ffac-4241-99b0-d08664e84ffd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.614121] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1933.632825] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1933.632825] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1933.632825] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Deleting the datastore file [datastore2] f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1933.632825] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fd2f117a-369c-459a-8fbf-f5029e7156ad {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.638971] env[68040]: DEBUG oslo_vmware.api [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Waiting for the task: (returnval){ [ 1933.638971] env[68040]: value = "task-3200337" [ 1933.638971] env[68040]: _type = "Task" [ 1933.638971] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.649296] env[68040]: DEBUG oslo_vmware.api [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Task: {'id': task-3200337, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.672132] env[68040]: DEBUG oslo_vmware.rw_handles [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/54745197-08d6-4a9d-97bc-6128da801732/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1933.732510] env[68040]: DEBUG oslo_vmware.rw_handles [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1933.732698] env[68040]: DEBUG oslo_vmware.rw_handles [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/54745197-08d6-4a9d-97bc-6128da801732/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1933.921218] env[68040]: DEBUG nova.network.neutron [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1933.931576] env[68040]: INFO nova.compute.manager [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Took 0.37 seconds to deallocate network for instance. 
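
[editor's note] The "Creating HTTP connection to write to file" and "Closing write handle" lines come from oslo.vmware's datastore file handles, which stream image bytes to the ESX host over HTTPS. Note that close() is where the HTTP response is read, so a server that hangs up early surfaces there — that is the RemoteDisconnected warning seen earlier in this section. A rough sketch under placeholder connection details (host, cookies, and the hypothetical upload_image wrapper are illustrative only):

    # Sketch only: streaming image data to a datastore file the way the
    # fetch above does. Host, port and cookies are placeholders.
    from oslo_vmware import rw_handles

    def upload_image(image_chunks, cookies):
        handle = rw_handles.FileWriteHandle(
            'esx.example.test', 443,            # placeholder host/port
            'ha-datacenter', 'datastore2',
            cookies,
            'vmware_temp/example/tmp-sparse.vmdk',
            21318656)                           # file size taken from the log above
        for chunk in image_chunks:              # e.g. the glance image iterator
            handle.write(chunk)
        # close() reads the HTTP response; if the remote end drops the
        # connection first, this is where RemoteDisconnected is raised.
        handle.close()
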
[ 1934.020062] env[68040]: INFO nova.scheduler.client.report [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Deleted allocations for instance 4dfa01f8-53a0-4ee4-9b00-93017144ea0b [ 1934.044184] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ca06cd18-78e1-41af-8b57-293cadeff52d tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Lock "4dfa01f8-53a0-4ee4-9b00-93017144ea0b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 653.872s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.044453] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c34bbd03-eddd-4ae7-b6cc-387e2829408c tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Lock "4dfa01f8-53a0-4ee4-9b00-93017144ea0b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 456.667s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.044704] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c34bbd03-eddd-4ae7-b6cc-387e2829408c tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Acquiring lock "4dfa01f8-53a0-4ee4-9b00-93017144ea0b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.044928] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c34bbd03-eddd-4ae7-b6cc-387e2829408c tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Lock "4dfa01f8-53a0-4ee4-9b00-93017144ea0b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.045116] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c34bbd03-eddd-4ae7-b6cc-387e2829408c tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Lock "4dfa01f8-53a0-4ee4-9b00-93017144ea0b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.047272] env[68040]: INFO nova.compute.manager [None req-c34bbd03-eddd-4ae7-b6cc-387e2829408c tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Terminating instance [ 1934.049330] env[68040]: DEBUG nova.compute.manager [None req-c34bbd03-eddd-4ae7-b6cc-387e2829408c tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Start destroying the instance on the hypervisor. 
{{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1934.049432] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-c34bbd03-eddd-4ae7-b6cc-387e2829408c tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1934.049996] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d35d7fe7-33bd-4344-acd2-71c503ef9acb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.060126] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e6d219-e174-4542-9ffb-617fb70ccfc4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.088143] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-c34bbd03-eddd-4ae7-b6cc-387e2829408c tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4dfa01f8-53a0-4ee4-9b00-93017144ea0b could not be found. [ 1934.088392] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-c34bbd03-eddd-4ae7-b6cc-387e2829408c tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1934.088583] env[68040]: INFO nova.compute.manager [None req-c34bbd03-eddd-4ae7-b6cc-387e2829408c tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1934.088870] env[68040]: DEBUG oslo.service.loopingcall [None req-c34bbd03-eddd-4ae7-b6cc-387e2829408c tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1934.089129] env[68040]: DEBUG nova.compute.manager [-] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1934.089230] env[68040]: DEBUG nova.network.neutron [-] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1934.112786] env[68040]: DEBUG nova.network.neutron [-] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1934.121269] env[68040]: INFO nova.compute.manager [-] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] Took 0.03 seconds to deallocate network for instance. 
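The WARNING above shows vmops treating a VM that is already gone from vCenter as destroyed, so that network and resource cleanup still run for the half-deleted instance. A minimal sketch of that tolerance pattern; InstanceNotFound, search_vm_ref and destroy_vm are simplified stand-ins, not Nova's exact helpers:

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def destroy(instance_uuid, search_vm_ref, destroy_vm):
        try:
            destroy_vm(search_vm_ref(instance_uuid))
        except InstanceNotFound:
            # The backend VM is already absent: log and keep going so the
            # rest of terminate (network, claims) still gets cleaned up.
            print('Instance does not exist on backend: %s' % instance_uuid)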
[ 1934.149636] env[68040]: DEBUG oslo_vmware.api [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Task: {'id': task-3200337, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.030009} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.149887] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1934.150097] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1934.150276] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1934.150453] env[68040]: INFO nova.compute.manager [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Took 0.56 seconds to destroy the instance on the hypervisor. [ 1934.150710] env[68040]: DEBUG oslo.service.loopingcall [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1934.150963] env[68040]: DEBUG nova.compute.manager [-] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1934.153630] env[68040]: DEBUG nova.compute.claims [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1934.154724] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.154724] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.235725] env[68040]: DEBUG oslo_concurrency.lockutils [None req-c34bbd03-eddd-4ae7-b6cc-387e2829408c tempest-ServerRescueNegativeTestJSON-1099594005 tempest-ServerRescueNegativeTestJSON-1099594005-project-member] Lock "4dfa01f8-53a0-4ee4-9b00-93017144ea0b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.191s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.237470] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "4dfa01f8-53a0-4ee4-9b00-93017144ea0b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 99.049s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.237691] env[68040]: INFO nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 4dfa01f8-53a0-4ee4-9b00-93017144ea0b] During sync_power_state the instance has a pending task (deleting). Skip. 
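The "compute_resources" lock above serializes resource-tracker updates such as abort_instance_claim, and the waited/held times in the records come from oslo.concurrency's lock wrapper. A sketch of the same serialization using lockutils.synchronized directly (Nova reaches it through its own helper; the function body here is a placeholder):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(instance):
        # Everything in here runs with the "compute_resources" lock held;
        # lockutils emits the Acquiring / acquired / "released" records
        # seen above around this critical section.
        ...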
[ 1934.237886] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "4dfa01f8-53a0-4ee4-9b00-93017144ea0b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.337619] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f13677e-61f3-4457-904d-7f8966c112d3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.345801] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2913d4-4973-4dfd-8dc7-80fb67ff2764 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.375742] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68441668-bf3f-4970-856a-5330f2476d1a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.382960] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5ee132-6702-45a3-ae08-98db2a56f451 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.396324] env[68040]: DEBUG nova.compute.provider_tree [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1934.404210] env[68040]: DEBUG nova.scheduler.client.report [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1934.419139] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.265s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.419838] env[68040]: ERROR nova.compute.manager [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 8c308313-03d5-40b6-a5fe-9037e32dc76e. 
[ 1934.419838] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Traceback (most recent call last): [ 1934.419838] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1934.419838] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1934.419838] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1934.419838] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] result = getattr(controller, method)(*args, **kwargs) [ 1934.419838] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1934.419838] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return self._get(image_id) [ 1934.419838] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1934.419838] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1934.419838] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1934.420172] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] resp, body = self.http_client.get(url, headers=header) [ 1934.420172] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1934.420172] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return self.request(url, 'GET', **kwargs) [ 1934.420172] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1934.420172] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return self._handle_response(resp) [ 1934.420172] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1934.420172] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] raise exc.from_response(resp, resp.content) [ 1934.420172] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1934.420172] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] [ 1934.420172] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] During handling of the above exception, another exception occurred: [ 1934.420172] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] [ 1934.420172] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Traceback (most recent call last): [ 1934.420584] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1934.420584] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] self.driver.spawn(context, instance, image_meta, [ 1934.420584] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1934.420584] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1934.420584] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1934.420584] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] self._fetch_image_if_missing(context, vi) [ 1934.420584] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1934.420584] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] image_fetch(context, vi, tmp_image_ds_loc) [ 1934.420584] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1934.420584] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] images.fetch_image( [ 1934.420584] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1934.420584] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] metadata = IMAGE_API.get(context, image_ref) [ 1934.420584] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1934.420930] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return session.show(context, image_id, [ 1934.420930] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1934.420930] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] _reraise_translated_image_exception(image_id) [ 1934.420930] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1934.420930] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] raise new_exc.with_traceback(exc_trace) [ 1934.420930] env[68040]: ERROR nova.compute.manager [instance: 
f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1934.420930] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1934.420930] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1934.420930] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] result = getattr(controller, method)(*args, **kwargs) [ 1934.420930] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1934.420930] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return self._get(image_id) [ 1934.420930] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1934.420930] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1934.421297] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1934.421297] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] resp, body = self.http_client.get(url, headers=header) [ 1934.421297] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 393, in get [ 1934.421297] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return self.request(url, 'GET', **kwargs) [ 1934.421297] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1934.421297] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return self._handle_response(resp) [ 1934.421297] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1934.421297] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] raise exc.from_response(resp, resp.content) [ 1934.421297] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] nova.exception.ImageNotAuthorized: Not authorized for image 8c308313-03d5-40b6-a5fe-9037e32dc76e. [ 1934.421297] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] [ 1934.421297] env[68040]: DEBUG nova.compute.utils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Not authorized for image 8c308313-03d5-40b6-a5fe-9037e32dc76e. 
{{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1934.422437] env[68040]: DEBUG nova.compute.manager [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Build of instance f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2 was re-scheduled: Not authorized for image 8c308313-03d5-40b6-a5fe-9037e32dc76e. {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1934.422873] env[68040]: DEBUG nova.compute.manager [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1934.423113] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Acquiring lock "refresh_cache-f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1934.423265] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Acquired lock "refresh_cache-f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1934.423435] env[68040]: DEBUG nova.network.neutron [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1934.448303] env[68040]: DEBUG nova.network.neutron [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1934.510770] env[68040]: DEBUG nova.network.neutron [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1934.519584] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Releasing lock "refresh_cache-f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1934.519820] env[68040]: DEBUG nova.compute.manager [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1934.520013] env[68040]: DEBUG nova.compute.manager [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Skipping network deallocation for instance since networking was not requested. {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1934.603088] env[68040]: INFO nova.scheduler.client.report [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Deleted allocations for instance f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2 [ 1934.617997] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4d1d0773-b1c9-46ac-ae87-1bfcdde08261 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Lock "f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 619.272s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.618257] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Lock "f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 422.445s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.618469] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Acquiring lock "f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.618676] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Lock "f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.618872] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Lock "f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.620654] env[68040]: INFO nova.compute.manager [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Terminating instance [ 1934.622219] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Acquiring lock 
"refresh_cache-f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1934.622380] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Acquired lock "refresh_cache-f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1934.622549] env[68040]: DEBUG nova.network.neutron [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1934.651620] env[68040]: DEBUG nova.network.neutron [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1934.710148] env[68040]: DEBUG nova.network.neutron [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1934.718594] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Releasing lock "refresh_cache-f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1934.718980] env[68040]: DEBUG nova.compute.manager [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Start destroying the instance on the hypervisor. 
{{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1934.719190] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1934.719687] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ec6ff159-c309-45be-bb89-3b44ab2e21a4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.728797] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23029a78-a2b0-45af-a65a-d12fb399e7f5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.755616] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2 could not be found. [ 1934.755769] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1934.755954] env[68040]: INFO nova.compute.manager [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1934.756212] env[68040]: DEBUG oslo.service.loopingcall [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1934.756430] env[68040]: DEBUG nova.compute.manager [-] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1934.756526] env[68040]: DEBUG nova.network.neutron [-] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1934.876747] env[68040]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68040) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1934.877088] env[68040]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1934.877879] env[68040]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1934.877879] env[68040]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1934.877879] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1934.877879] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1934.877879] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1934.877879] env[68040]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1934.877879] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1934.877879] env[68040]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1934.877879] env[68040]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1934.877879] env[68040]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-7dbea69a-7ea0-4a44-ac47-1928eeb8fc79'] [ 1934.877879] env[68040]: ERROR oslo.service.loopingcall [ 1934.877879] env[68040]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1934.877879] env[68040]: ERROR oslo.service.loopingcall [ 1934.877879] env[68040]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1934.877879] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1934.877879] env[68040]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1934.878381] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1934.878381] env[68040]: ERROR oslo.service.loopingcall result = f(*args, 
**kwargs) [ 1934.878381] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1934.878381] env[68040]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1934.878381] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1934.878381] env[68040]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1934.878381] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1934.878381] env[68040]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1934.878381] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1934.878381] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1934.878381] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1934.878381] env[68040]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1934.878381] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1934.878381] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1934.878381] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1934.878381] env[68040]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1934.878381] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1934.878381] env[68040]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1934.878886] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1934.878886] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1934.878886] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1934.878886] env[68040]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1934.878886] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1934.878886] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1934.878886] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1934.878886] env[68040]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1934.878886] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1934.878886] env[68040]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1934.878886] env[68040]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1934.878886] env[68040]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1934.878886] env[68040]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1934.878886] env[68040]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1934.878886] env[68040]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1934.878886] env[68040]: ERROR oslo.service.loopingcall [ 1934.879399] env[68040]: ERROR nova.compute.manager [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1934.905856] env[68040]: ERROR nova.compute.manager [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1934.905856] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Traceback (most recent call last): [ 1934.905856] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1934.905856] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] ret = obj(*args, **kwargs) [ 1934.905856] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1934.905856] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] exception_handler_v20(status_code, error_body) [ 1934.905856] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1934.905856] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] raise client_exc(message=error_message, [ 1934.905856] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1934.905856] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Neutron server returns request_ids: ['req-7dbea69a-7ea0-4a44-ac47-1928eeb8fc79'] [ 1934.905856] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] [ 1934.906340] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] During handling of the above exception, another exception occurred: [ 1934.906340] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] [ 1934.906340] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Traceback (most recent call last): [ 1934.906340] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File 
"/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1934.906340] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] self._delete_instance(context, instance, bdms) [ 1934.906340] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1934.906340] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] self._shutdown_instance(context, instance, bdms) [ 1934.906340] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1934.906340] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] self._try_deallocate_network(context, instance, requested_networks) [ 1934.906340] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1934.906340] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] with excutils.save_and_reraise_exception(): [ 1934.906340] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1934.906340] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] self.force_reraise() [ 1934.906769] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1934.906769] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] raise self.value [ 1934.906769] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1934.906769] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] _deallocate_network_with_retries() [ 1934.906769] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1934.906769] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return evt.wait() [ 1934.906769] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1934.906769] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] result = hub.switch() [ 1934.906769] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1934.906769] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return self.greenlet.switch() [ 1934.906769] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1934.906769] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] result = 
func(*self.args, **self.kw) [ 1934.907167] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1934.907167] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] result = f(*args, **kwargs) [ 1934.907167] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1934.907167] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] self._deallocate_network( [ 1934.907167] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1934.907167] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] self.network_api.deallocate_for_instance( [ 1934.907167] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1934.907167] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] data = neutron.list_ports(**search_opts) [ 1934.907167] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1934.907167] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] ret = obj(*args, **kwargs) [ 1934.907167] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1934.907167] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return self.list('ports', self.ports_path, retrieve_all, [ 1934.907167] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1934.907598] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] ret = obj(*args, **kwargs) [ 1934.907598] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1934.907598] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] for r in self._pagination(collection, path, **params): [ 1934.907598] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1934.907598] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] res = self.get(path, params=params) [ 1934.907598] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1934.907598] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] ret = obj(*args, **kwargs) [ 1934.907598] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 
356, in get [ 1934.907598] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return self.retry_request("GET", action, body=body, [ 1934.907598] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1934.907598] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] ret = obj(*args, **kwargs) [ 1934.907598] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1934.907598] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] return self.do_request(method, action, body=body, [ 1934.907993] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1934.907993] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] ret = obj(*args, **kwargs) [ 1934.907993] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1934.907993] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] self._handle_fault_response(status_code, replybody, resp) [ 1934.907993] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1934.907993] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1934.907993] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1934.907993] env[68040]: ERROR nova.compute.manager [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] [ 1934.932960] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Lock "f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.315s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.934029] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 99.745s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.934217] env[68040]: INFO nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] During sync_power_state the instance has a pending task (deleting). Skip. 
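The tracebacks above show the wrapper in nova/network/neutron.py translating a neutronclient 401 into NeutronAdminCredentialConfigurationInvalid, on the reasoning that an admin token should never be rejected unless the [neutron] credentials in nova.conf are wrong. A rough sketch of that translation pattern, with simplified stand-in exception classes rather than Nova's actual code:

    class Unauthorized(Exception):
        """Stand-in for neutronclient.common.exceptions.Unauthorized."""

    class NeutronAdminCredentialConfigurationInvalid(Exception):
        """Stand-in for the nova.exception class of the same name."""

    def wrap_neutron_call(func, is_admin_session):
        # Wrap a neutron client call the way the traceback's `wrapper`
        # frames do: pass normal errors through, but reinterpret a 401
        # on an admin session as a configuration problem.
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Unauthorized:
                if is_admin_session:
                    # An admin token should never get a 401; flag the
                    # nova.conf [neutron] credentials instead of retrying.
                    raise NeutronAdminCredentialConfigurationInvalid()
                raise
        return wrapper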
[ 1934.934409] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.976327] env[68040]: INFO nova.compute.manager [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] [instance: f4367cb6-eb24-49f4-9c4f-ebcd4153a7a2] Successfully reverted task state from None on failure for instance. [ 1934.979467] env[68040]: ERROR oslo_messaging.rpc.server [None req-e9d49cef-5cff-476b-9ebb-9e52de575d93 tempest-ServerShowV247Test-556552756 tempest-ServerShowV247Test-556552756-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1934.979467] env[68040]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1934.979467] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1934.979467] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1934.979467] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1934.979467] env[68040]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1934.979467] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1934.979467] env[68040]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1934.979467] env[68040]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1934.979467] env[68040]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-7dbea69a-7ea0-4a44-ac47-1928eeb8fc79'] [ 1934.979467] env[68040]: ERROR oslo_messaging.rpc.server [ 1934.979467] env[68040]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1934.979467] env[68040]: ERROR oslo_messaging.rpc.server [ 1934.979467] env[68040]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1934.979467] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1934.979467] env[68040]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1934.980097] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1934.980097] env[68040]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1934.980097] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1934.980097] env[68040]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1934.980097] env[68040]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1934.980097] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1934.980097] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1934.980097] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1934.980097] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1934.980097] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1934.980097] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1934.980097] env[68040]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1934.980097] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1934.980097] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1934.980097] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1934.980097] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1934.980097] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1934.980097] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1934.980684] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1934.980684] env[68040]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1934.980684] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1934.980684] env[68040]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1934.980684] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1934.980684] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1934.980684] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1934.980684] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1934.980684] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1934.980684] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1934.980684] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1934.980684] env[68040]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1934.980684] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3344, in terminate_instance [ 1934.980684] env[68040]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1934.980684] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 412, in inner [ 1934.980684] env[68040]: ERROR oslo_messaging.rpc.server return f(*args, 
**kwargs) [ 1934.980684] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3339, in do_terminate_instance [ 1934.980684] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1934.981309] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1934.981309] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1934.981309] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1934.981309] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1934.981309] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3332, in do_terminate_instance [ 1934.981309] env[68040]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1934.981309] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3267, in _delete_instance [ 1934.981309] env[68040]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1934.981309] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3161, in _shutdown_instance [ 1934.981309] env[68040]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1934.981309] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3075, in _try_deallocate_network [ 1934.981309] env[68040]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1934.981309] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1934.981309] env[68040]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1934.981309] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1934.981309] env[68040]: ERROR oslo_messaging.rpc.server raise self.value [ 1934.981309] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3073, in _try_deallocate_network [ 1934.981309] env[68040]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1934.981871] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1934.981871] env[68040]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1934.981871] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1934.981871] env[68040]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1934.981871] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1934.981871] env[68040]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1934.981871] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1934.981871] env[68040]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1934.981871] env[68040]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1934.981871] env[68040]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1934.981871] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3062, in _deallocate_network_with_retries [ 1934.981871] env[68040]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1934.981871] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2282, in _deallocate_network [ 1934.981871] env[68040]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1934.981871] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1934.981871] env[68040]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1934.981871] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1934.981871] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1934.982438] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1934.982438] env[68040]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1934.982438] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1934.982438] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1934.982438] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1934.982438] env[68040]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1934.982438] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1934.982438] env[68040]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1934.982438] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1934.982438] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1934.982438] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1934.982438] env[68040]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1934.982438] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1934.982438] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1934.982438] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1934.982438] env[68040]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1934.982438] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1934.982438] env[68040]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1934.983122] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1934.983122] 
env[68040]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1934.983122] env[68040]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1934.983122] env[68040]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1934.983122] env[68040]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1934.983122] env[68040]: ERROR oslo_messaging.rpc.server [ 1935.984152] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1939.985053] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1939.985053] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1939.985053] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1940.003986] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1940.004186] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1940.004311] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1940.004417] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1940.004543] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1940.004701] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Skipping network cache update for instance because it is Building. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1940.004842] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1940.004962] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1940.005097] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1940.005578] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1940.005757] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1940.016399] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1940.016623] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.016793] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.016945] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1940.018319] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a9e13f-fa45-4a53-bc5e-c84924a19824 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.026680] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175d37a5-a029-42f4-9bc4-0ec261432289 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.040103] env[68040]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63da7d45-2916-4322-80a4-b0baa6821a9c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.046177] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-052f61a3-f812-491e-beea-5decc04d6a46 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.075677] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180963MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1940.075817] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1940.076017] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.139670] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1940.139839] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 221a5bbe-7168-4f5c-ab49-8a149545655f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1940.139971] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 031481de-d52f-4f3f-80e5-0d0d6803d624 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1940.140109] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 2e44ead1-4676-4d9b-bbae-5082f505fc8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1940.140232] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0210d9d4-2161-4b06-bc81-9de361accca6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1940.140352] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8f9a6934-9ded-4561-8d83-aacd4d79f29a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1940.140468] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8a1a6866-1439-4f82-9fda-a7d9a7f211a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1940.140583] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 92b0f3c0-2c87-478d-8b11-f0b05aee12ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1940.140770] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1940.140909] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1940.156098] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Refreshing inventories for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1940.168139] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Updating ProviderTree inventory for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1940.168316] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Updating inventory in ProviderTree for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1940.177914] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Refreshing aggregate associations for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44, aggregates: None {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1940.195039] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Refreshing trait associations for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1940.293973] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade0af65-6ba7-40cc-b790-a595cec0ee35 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.301773] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b4e147-d91d-4771-9634-fba24eab1f9a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.331457] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ec8499-d239-42e0-b3dc-449c84e5e380 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.339327] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a95dbd-b229-41d7-9067-a149b892975e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.353222] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1940.362285] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1940.376438] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1940.376645] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.301s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1941.355496] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1941.984584] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1942.983558] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1942.983967] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1945.980525] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1951.979787] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1979.374938] env[68040]: WARNING oslo_vmware.rw_handles [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1979.374938] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1979.374938] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1979.374938] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1979.374938] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1979.374938] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 1979.374938] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1979.374938] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1979.374938] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1979.374938] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1979.374938] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1979.374938] env[68040]: ERROR oslo_vmware.rw_handles [ 1979.375629] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Downloaded image file data 
8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/54745197-08d6-4a9d-97bc-6128da801732/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1979.377382] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1979.377626] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Copying Virtual Disk [datastore2] vmware_temp/54745197-08d6-4a9d-97bc-6128da801732/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/54745197-08d6-4a9d-97bc-6128da801732/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1979.377913] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3b450985-337f-4675-8d83-ea88f5532b79 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.385527] env[68040]: DEBUG oslo_vmware.api [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Waiting for the task: (returnval){ [ 1979.385527] env[68040]: value = "task-3200338" [ 1979.385527] env[68040]: _type = "Task" [ 1979.385527] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.393664] env[68040]: DEBUG oslo_vmware.api [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Task: {'id': task-3200338, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.896444] env[68040]: DEBUG oslo_vmware.exceptions [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Fault InvalidArgument not matched. 
{{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1979.896730] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1979.897311] env[68040]: ERROR nova.compute.manager [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1979.897311] env[68040]: Faults: ['InvalidArgument'] [ 1979.897311] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Traceback (most recent call last): [ 1979.897311] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1979.897311] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] yield resources [ 1979.897311] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1979.897311] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] self.driver.spawn(context, instance, image_meta, [ 1979.897311] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1979.897311] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1979.897311] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1979.897311] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] self._fetch_image_if_missing(context, vi) [ 1979.897311] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1979.897782] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] image_cache(vi, tmp_image_ds_loc) [ 1979.897782] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1979.897782] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] vm_util.copy_virtual_disk( [ 1979.897782] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1979.897782] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] session._wait_for_task(vmdk_copy_task) [ 1979.897782] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 1979.897782] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] return self.wait_for_task(task_ref) [ 1979.897782] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1979.897782] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] return evt.wait() [ 1979.897782] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1979.897782] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] result = hub.switch() [ 1979.897782] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1979.897782] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] return self.greenlet.switch() [ 1979.898245] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1979.898245] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] self.f(*self.args, **self.kw) [ 1979.898245] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1979.898245] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] raise exceptions.translate_fault(task_info.error) [ 1979.898245] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1979.898245] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Faults: ['InvalidArgument'] [ 1979.898245] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] [ 1979.898245] env[68040]: INFO nova.compute.manager [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Terminating instance [ 1979.899167] env[68040]: DEBUG oslo_concurrency.lockutils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1979.899376] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1979.899600] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd25e299-0b85-4b54-9fc2-06f623dce8ac {{(pid=68040) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.901660] env[68040]: DEBUG nova.compute.manager [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1979.901877] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1979.902605] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c81c70-e186-4a2e-bef9-22f1fa519617 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.909352] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1979.909555] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-75bf27d7-1389-4bd5-9170-b3a141be6854 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.911605] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1979.911806] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1979.912744] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcf296f5-f3a6-4120-86be-7b3d6876f234 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.917299] env[68040]: DEBUG oslo_vmware.api [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Waiting for the task: (returnval){ [ 1979.917299] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]524742e3-3f2e-d53b-6bd6-071243ee9c8c" [ 1979.917299] env[68040]: _type = "Task" [ 1979.917299] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.924516] env[68040]: DEBUG oslo_vmware.api [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]524742e3-3f2e-d53b-6bd6-071243ee9c8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.970771] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1979.971041] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1979.971187] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Deleting the datastore file [datastore2] c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1979.971464] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d18d31c2-489f-4428-95f8-0f0d40501bcc {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.977942] env[68040]: DEBUG oslo_vmware.api [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Waiting for the task: (returnval){ [ 1979.977942] env[68040]: value = "task-3200340" [ 1979.977942] env[68040]: _type = "Task" [ 1979.977942] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.985304] env[68040]: DEBUG oslo_vmware.api [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Task: {'id': task-3200340, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.428072] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1980.428461] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Creating directory with path [datastore2] vmware_temp/39e778b2-eb22-4b03-9eae-538a762130a1/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1980.428589] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e318a444-2f8b-414f-9729-895b4289f939 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.439646] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Created directory with path [datastore2] vmware_temp/39e778b2-eb22-4b03-9eae-538a762130a1/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1980.439832] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Fetch image to [datastore2] vmware_temp/39e778b2-eb22-4b03-9eae-538a762130a1/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1980.440011] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/39e778b2-eb22-4b03-9eae-538a762130a1/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1980.440705] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d45dc5b4-50c7-4f9e-a0ea-7bb429b5dfd9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.447076] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca83b4d4-b627-4c71-912c-40560b2521bd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.456953] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5444645-c255-431c-87e6-e2bdf0c7d51a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.489815] env[68040]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee58ea5-75aa-4f3e-8410-e38ae17cdba2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.496730] env[68040]: DEBUG oslo_vmware.api [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Task: {'id': task-3200340, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065221} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.498129] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1980.498329] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1980.498504] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1980.498681] env[68040]: INFO nova.compute.manager [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1980.500726] env[68040]: DEBUG nova.compute.claims [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1980.500895] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.501129] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.503685] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-392cd403-4818-4d18-bd7d-79c552a49df5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.524846] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1980.589019] env[68040]: DEBUG oslo_vmware.rw_handles [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/39e778b2-eb22-4b03-9eae-538a762130a1/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1980.650401] env[68040]: DEBUG oslo_vmware.rw_handles [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1980.650701] env[68040]: DEBUG oslo_vmware.rw_handles [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/39e778b2-eb22-4b03-9eae-538a762130a1/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1980.700352] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07016640-21ff-4ead-be8a-63cff561312f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.707863] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e232b186-564d-4cb2-ab5e-7a3892c30002 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.738251] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073539c9-ca26-442d-a545-f8cb58b6ef9f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.744777] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b0fba2-d749-4f45-99b0-88705f70753f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.757442] env[68040]: DEBUG nova.compute.provider_tree [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1980.765806] env[68040]: DEBUG nova.scheduler.client.report [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1980.779192] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.278s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1980.779685] env[68040]: ERROR nova.compute.manager [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1980.779685] env[68040]: Faults: ['InvalidArgument'] [ 1980.779685] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Traceback (most recent call last): [ 1980.779685] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1980.779685] env[68040]: ERROR nova.compute.manager 
[instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] self.driver.spawn(context, instance, image_meta, [ 1980.779685] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1980.779685] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1980.779685] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1980.779685] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] self._fetch_image_if_missing(context, vi) [ 1980.779685] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1980.779685] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] image_cache(vi, tmp_image_ds_loc) [ 1980.779685] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1980.780111] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] vm_util.copy_virtual_disk( [ 1980.780111] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1980.780111] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] session._wait_for_task(vmdk_copy_task) [ 1980.780111] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1980.780111] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] return self.wait_for_task(task_ref) [ 1980.780111] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1980.780111] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] return evt.wait() [ 1980.780111] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1980.780111] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] result = hub.switch() [ 1980.780111] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1980.780111] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] return self.greenlet.switch() [ 1980.780111] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1980.780111] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] self.f(*self.args, **self.kw) [ 1980.780501] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1980.780501] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] raise exceptions.translate_fault(task_info.error) [ 1980.780501] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1980.780501] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Faults: ['InvalidArgument'] [ 1980.780501] env[68040]: ERROR nova.compute.manager [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] [ 1980.780501] env[68040]: DEBUG nova.compute.utils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1980.781743] env[68040]: DEBUG nova.compute.manager [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Build of instance c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a was re-scheduled: A specified parameter was not correct: fileType [ 1980.781743] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1980.782123] env[68040]: DEBUG nova.compute.manager [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1980.782338] env[68040]: DEBUG nova.compute.manager [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1980.782543] env[68040]: DEBUG nova.compute.manager [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1980.782714] env[68040]: DEBUG nova.network.neutron [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1981.081110] env[68040]: DEBUG nova.network.neutron [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1981.096429] env[68040]: INFO nova.compute.manager [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Took 0.31 seconds to deallocate network for instance. [ 1981.192732] env[68040]: INFO nova.scheduler.client.report [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Deleted allocations for instance c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a [ 1981.214803] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e54b2c96-4550-4e9e-b042-92cffaa4ecb1 tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Lock "c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 527.294s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.215097] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3f6d5799-130b-400f-a44b-e36b3755cdae tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Lock "c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 331.723s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.215323] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3f6d5799-130b-400f-a44b-e36b3755cdae tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Acquiring lock "c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.215537] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3f6d5799-130b-400f-a44b-e36b3755cdae tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Lock "c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68040) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.215704] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3f6d5799-130b-400f-a44b-e36b3755cdae tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Lock "c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.217607] env[68040]: INFO nova.compute.manager [None req-3f6d5799-130b-400f-a44b-e36b3755cdae tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Terminating instance [ 1981.219290] env[68040]: DEBUG nova.compute.manager [None req-3f6d5799-130b-400f-a44b-e36b3755cdae tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1981.219543] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-3f6d5799-130b-400f-a44b-e36b3755cdae tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1981.220026] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-29597fda-ea06-4cb5-a392-4e4f02ae84b6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.228899] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127cc473-2adc-4e66-9577-d47e342c78f9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.256610] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-3f6d5799-130b-400f-a44b-e36b3755cdae tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a could not be found. [ 1981.256815] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-3f6d5799-130b-400f-a44b-e36b3755cdae tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1981.256994] env[68040]: INFO nova.compute.manager [None req-3f6d5799-130b-400f-a44b-e36b3755cdae tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1981.257264] env[68040]: DEBUG oslo.service.loopingcall [None req-3f6d5799-130b-400f-a44b-e36b3755cdae tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1981.257472] env[68040]: DEBUG nova.compute.manager [-] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1981.257568] env[68040]: DEBUG nova.network.neutron [-] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1981.279618] env[68040]: DEBUG nova.network.neutron [-] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1981.287864] env[68040]: INFO nova.compute.manager [-] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] Took 0.03 seconds to deallocate network for instance. [ 1981.371596] env[68040]: DEBUG oslo_concurrency.lockutils [None req-3f6d5799-130b-400f-a44b-e36b3755cdae tempest-ServerTagsTestJSON-1451408559 tempest-ServerTagsTestJSON-1451408559-project-member] Lock "c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.156s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.372378] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 146.184s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.372568] env[68040]: INFO nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a] During sync_power_state the instance has a pending task (deleting). Skip. 
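[editor's note] The lockutils lines above record both how long a caller waited for a named lock (here 331.723s for the terminate path, queued behind a build that held the instance lock for 527.294s) and how long it then held it. A minimal sketch of that wait/held bookkeeping, assuming plain threading.Lock semantics rather than oslo.concurrency's real implementation:

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}
    _registry_guard = threading.Lock()

    @contextmanager
    def timed_lock(name):
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        print('Lock "%s" acquired :: waited %.3fs' % (name, time.monotonic() - t0))
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print('Lock "%s" released :: held %.3fs' % (name, time.monotonic() - t1))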
[ 1981.372742] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "c1eb9d61-e40e-4ccb-9d24-9bedd74c4f4a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1988.422666] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "e9994aad-8053-4936-ad4b-5347a1a62f4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1988.422970] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "e9994aad-8053-4936-ad4b-5347a1a62f4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1988.433654] env[68040]: DEBUG nova.compute.manager [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1988.480547] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1988.480793] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1988.482159] env[68040]: INFO nova.compute.claims [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1988.626670] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-958e0e6e-2586-4dbb-8272-c3b2fbe6d8ae {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.634041] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96118ed4-1840-4a13-9e66-74b2bb7ef146 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.663085] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e4bc2e0e-0932-4a22-b1f2-d59952006148 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.670017] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82328d50-ffbc-4dd5-8838-6f83f4d874e3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.684096] env[68040]: DEBUG nova.compute.provider_tree [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1988.692216] env[68040]: DEBUG nova.scheduler.client.report [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1988.704915] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.224s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1988.705386] env[68040]: DEBUG nova.compute.manager [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1988.736496] env[68040]: DEBUG nova.compute.utils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1988.737805] env[68040]: DEBUG nova.compute.manager [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Allocating IP information in the background. 
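[editor's note] The inventory data reported above fixes the node's schedulable capacity: placement treats (total - reserved) * allocation_ratio as the ceiling per resource class, so this node can hold 192 VCPU, 196078 MB of RAM and 400 GB of disk worth of allocations. A quick check of that arithmetic using the values from the log:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    # Effective capacity per resource class, as placement computes it.
    capacity = {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
                for rc, v in inventory.items()}
    print(capacity)   # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}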
{{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1988.737981] env[68040]: DEBUG nova.network.neutron [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1988.761041] env[68040]: DEBUG nova.compute.manager [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1988.813352] env[68040]: DEBUG nova.policy [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd5b784bb2384457e9bcc4e9ff02ea850', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9a2c3ee9bf1c40228a089e4b0e5bff00', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 1988.833257] env[68040]: DEBUG nova.compute.manager [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Start spawning the instance on the hypervisor. 
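[editor's note] The nova.policy line above shows an oslo.policy check failing for network:attach_external_network: the tempest credentials carry is_admin=False and only the reader/member roles. A toy evaluation of the same decision; the admin-only rule assumed here is the usual default, not one read from this deployment:

    creds = {'is_admin': False, 'roles': ['reader', 'member']}

    def check_attach_external_network(creds):
        # Assumed default rule "is_admin:True": only admin contexts may
        # attach instances to external networks.
        return creds.get('is_admin', False)

    print(check_attach_external_network(creds))   # False -> check failed, as logged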
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1988.854262] env[68040]: DEBUG nova.virt.hardware [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1988.854547] env[68040]: DEBUG nova.virt.hardware [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1988.854723] env[68040]: DEBUG nova.virt.hardware [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1988.854961] env[68040]: DEBUG nova.virt.hardware [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1988.855138] env[68040]: DEBUG nova.virt.hardware [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1988.855291] env[68040]: DEBUG nova.virt.hardware [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1988.855503] env[68040]: DEBUG nova.virt.hardware [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1988.855670] env[68040]: DEBUG nova.virt.hardware [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1988.855839] env[68040]: DEBUG nova.virt.hardware [None 
req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1988.856011] env[68040]: DEBUG nova.virt.hardware [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1988.856207] env[68040]: DEBUG nova.virt.hardware [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1988.857064] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44089f94-7689-4d6c-8b1d-bb0151ca4d85 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.865095] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096660ef-e9ea-4d01-b777-012863e9c420 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.115581] env[68040]: DEBUG nova.network.neutron [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Successfully created port: a9355509-5ea7-46b2-b536-176c40715fb1 {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1989.625948] env[68040]: DEBUG nova.compute.manager [req-0377b1f9-05b0-4a5d-b4b6-6550f6697e16 req-73751850-3bc8-4958-a55c-c66616cbee71 service nova] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Received event network-vif-plugged-a9355509-5ea7-46b2-b536-176c40715fb1 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1989.626245] env[68040]: DEBUG oslo_concurrency.lockutils [req-0377b1f9-05b0-4a5d-b4b6-6550f6697e16 req-73751850-3bc8-4958-a55c-c66616cbee71 service nova] Acquiring lock "e9994aad-8053-4936-ad4b-5347a1a62f4e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1989.626431] env[68040]: DEBUG oslo_concurrency.lockutils [req-0377b1f9-05b0-4a5d-b4b6-6550f6697e16 req-73751850-3bc8-4958-a55c-c66616cbee71 service nova] Lock "e9994aad-8053-4936-ad4b-5347a1a62f4e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1989.626572] env[68040]: DEBUG oslo_concurrency.lockutils [req-0377b1f9-05b0-4a5d-b4b6-6550f6697e16 req-73751850-3bc8-4958-a55c-c66616cbee71 service nova] Lock "e9994aad-8053-4936-ad4b-5347a1a62f4e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1989.626722] env[68040]: DEBUG nova.compute.manager 
[req-0377b1f9-05b0-4a5d-b4b6-6550f6697e16 req-73751850-3bc8-4958-a55c-c66616cbee71 service nova] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] No waiting events found dispatching network-vif-plugged-a9355509-5ea7-46b2-b536-176c40715fb1 {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1989.626888] env[68040]: WARNING nova.compute.manager [req-0377b1f9-05b0-4a5d-b4b6-6550f6697e16 req-73751850-3bc8-4958-a55c-c66616cbee71 service nova] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Received unexpected event network-vif-plugged-a9355509-5ea7-46b2-b536-176c40715fb1 for instance with vm_state building and task_state spawning. [ 1989.708578] env[68040]: DEBUG nova.network.neutron [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Successfully updated port: a9355509-5ea7-46b2-b536-176c40715fb1 {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1989.721142] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "refresh_cache-e9994aad-8053-4936-ad4b-5347a1a62f4e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1989.721352] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquired lock "refresh_cache-e9994aad-8053-4936-ad4b-5347a1a62f4e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1989.721550] env[68040]: DEBUG nova.network.neutron [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1989.763381] env[68040]: DEBUG nova.network.neutron [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Instance cache missing network info. 
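[editor's note] The nova.virt.hardware lines further up walk a small search: with no flavor or image limits, sockets/cores/threads each default to a 65536 ceiling, and every (sockets, cores, threads) triple whose product equals the vCPU count is kept, then sorted by preference. For the 1-vCPU m1.nano flavor only (1, 1, 1) survives, matching "Got 1 possible topologies". An illustrative brute-force version of that enumeration:

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Every factorisation of the vCPU count into sockets*cores*threads
        # that respects the per-dimension limits.
        return [(s, c, t)
                for s, c, t in product(range(1, vcpus + 1), repeat=3)
                if s * c * t == vcpus
                and s <= max_sockets and c <= max_cores and t <= max_threads]

    print(possible_topologies(1))   # [(1, 1, 1)]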
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1989.921813] env[68040]: DEBUG nova.network.neutron [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Updating instance_info_cache with network_info: [{"id": "a9355509-5ea7-46b2-b536-176c40715fb1", "address": "fa:16:3e:ff:9b:32", "network": {"id": "9565e3df-4a40-4611-a5a9-efd2bc66053b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-780365588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a2c3ee9bf1c40228a089e4b0e5bff00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9355509-5e", "ovs_interfaceid": "a9355509-5ea7-46b2-b536-176c40715fb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1989.938327] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Releasing lock "refresh_cache-e9994aad-8053-4936-ad4b-5347a1a62f4e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1989.938628] env[68040]: DEBUG nova.compute.manager [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Instance network_info: |[{"id": "a9355509-5ea7-46b2-b536-176c40715fb1", "address": "fa:16:3e:ff:9b:32", "network": {"id": "9565e3df-4a40-4611-a5a9-efd2bc66053b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-780365588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a2c3ee9bf1c40228a089e4b0e5bff00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9355509-5e", "ovs_interfaceid": "a9355509-5ea7-46b2-b536-176c40715fb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1987}} [ 1989.939068] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:9b:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9355509-5ea7-46b2-b536-176c40715fb1', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1989.947221] env[68040]: DEBUG oslo.service.loopingcall [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1989.947730] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1989.948044] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-18bb74ac-3ea3-4692-b3c5-8e897fefd367 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.967700] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1989.967700] env[68040]: value = "task-3200341" [ 1989.967700] env[68040]: _type = "Task" [ 1989.967700] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.975083] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200341, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.982853] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1990.478733] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200341, 'name': CreateVM_Task, 'duration_secs': 0.298253} completed successfully. 
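[editor's note] The instance_info_cache payload above is a JSON-style list of VIF dicts, each nesting network -> subnets -> ips; the VIF info handed to the VMware driver (network_ref, mac_address, vif_model) is derived from it. A small sketch pulling the fixed IPs out of that shape, trimmed to the fields seen in the log:

    network_info = [{
        "id": "a9355509-5ea7-46b2-b536-176c40715fb1",
        "address": "fa:16:3e:ff:9b:32",
        "type": "ovs",
        "devname": "tapa9355509-5e",
        "network": {
            "id": "9565e3df-4a40-4611-a5a9-efd2bc66053b",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.8", "type": "fixed"}],
            }],
        },
    }]

    fixed_ips = [ip["address"]
                 for vif in network_info
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(fixed_ips)   # ['192.168.128.8']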
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.478934] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1990.485881] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1990.486062] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1990.486399] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1990.486635] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bd682dc-4950-4a95-ab80-06bb89019738 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.491034] env[68040]: DEBUG oslo_vmware.api [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for the task: (returnval){ [ 1990.491034] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52118ad7-6243-56e6-3ea5-4927a06b8bcf" [ 1990.491034] env[68040]: _type = "Task" [ 1990.491034] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.499688] env[68040]: DEBUG oslo_vmware.api [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52118ad7-6243-56e6-3ea5-4927a06b8bcf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.000571] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1991.000960] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1991.001043] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1991.650596] env[68040]: DEBUG nova.compute.manager [req-e950e4ad-f71d-4bcc-b322-11576b8f2a52 req-b80e1630-9e6f-47fe-9e21-c64ff4f59a15 service nova] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Received event network-changed-a9355509-5ea7-46b2-b536-176c40715fb1 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1991.650801] env[68040]: DEBUG nova.compute.manager [req-e950e4ad-f71d-4bcc-b322-11576b8f2a52 req-b80e1630-9e6f-47fe-9e21-c64ff4f59a15 service nova] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Refreshing instance network info cache due to event network-changed-a9355509-5ea7-46b2-b536-176c40715fb1. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1991.651015] env[68040]: DEBUG oslo_concurrency.lockutils [req-e950e4ad-f71d-4bcc-b322-11576b8f2a52 req-b80e1630-9e6f-47fe-9e21-c64ff4f59a15 service nova] Acquiring lock "refresh_cache-e9994aad-8053-4936-ad4b-5347a1a62f4e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1991.651171] env[68040]: DEBUG oslo_concurrency.lockutils [req-e950e4ad-f71d-4bcc-b322-11576b8f2a52 req-b80e1630-9e6f-47fe-9e21-c64ff4f59a15 service nova] Acquired lock "refresh_cache-e9994aad-8053-4936-ad4b-5347a1a62f4e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1991.651335] env[68040]: DEBUG nova.network.neutron [req-e950e4ad-f71d-4bcc-b322-11576b8f2a52 req-b80e1630-9e6f-47fe-9e21-c64ff4f59a15 service nova] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Refreshing network info cache for port a9355509-5ea7-46b2-b536-176c40715fb1 {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1991.894486] env[68040]: DEBUG nova.network.neutron [req-e950e4ad-f71d-4bcc-b322-11576b8f2a52 req-b80e1630-9e6f-47fe-9e21-c64ff4f59a15 service nova] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Updated VIF entry in instance network info cache for port a9355509-5ea7-46b2-b536-176c40715fb1. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1991.894831] env[68040]: DEBUG nova.network.neutron [req-e950e4ad-f71d-4bcc-b322-11576b8f2a52 req-b80e1630-9e6f-47fe-9e21-c64ff4f59a15 service nova] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Updating instance_info_cache with network_info: [{"id": "a9355509-5ea7-46b2-b536-176c40715fb1", "address": "fa:16:3e:ff:9b:32", "network": {"id": "9565e3df-4a40-4611-a5a9-efd2bc66053b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-780365588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a2c3ee9bf1c40228a089e4b0e5bff00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9355509-5e", "ovs_interfaceid": "a9355509-5ea7-46b2-b536-176c40715fb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1991.908933] env[68040]: DEBUG oslo_concurrency.lockutils [req-e950e4ad-f71d-4bcc-b322-11576b8f2a52 req-b80e1630-9e6f-47fe-9e21-c64ff4f59a15 service nova] Releasing lock "refresh_cache-e9994aad-8053-4936-ad4b-5347a1a62f4e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1995.983872] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1999.984051] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1999.984455] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1999.984455] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2000.002675] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2000.002841] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Skipping network cache update for instance because it is Building. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2000.002952] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2000.003084] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2000.003214] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2000.003345] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2000.003484] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2000.003604] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2000.003724] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. 
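[editor's note] The _heal_instance_info_cache pass above rebuilds its candidate list and drops every instance still in the Building state, which is why all eight instances are skipped and the cycle ends with "Didn't find any instances for network info cache update." A one-line filter captures the selection (instance records here are hypothetical stand-ins):

    instances = [
        {'uuid': 'e9994aad-8053-4936-ad4b-5347a1a62f4e', 'vm_state': 'building'},
        {'uuid': '221a5bbe-7168-4f5c-ab49-8a149545655f', 'vm_state': 'building'},
    ]

    to_heal = [i for i in instances if i['vm_state'] != 'building']
    print(to_heal)   # [] -> nothing to heal this cycle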
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2000.004252] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2001.984853] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2001.997061] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2001.997318] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2001.997499] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2001.997659] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2001.999177] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15914a10-10ef-4abf-883b-08591d6f1632 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.007711] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cd6a22-bcc0-456e-8409-df38ba6dc7f1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.021783] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8cb5293-47ea-4977-9f46-2efe5f59e39c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.027905] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b979d804-1911-4fd7-b851-ab34a2c0bc5f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.057506] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180991MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2002.057631] env[68040]: DEBUG 
oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.057841] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2002.123916] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 221a5bbe-7168-4f5c-ab49-8a149545655f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2002.123916] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 031481de-d52f-4f3f-80e5-0d0d6803d624 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2002.123916] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 2e44ead1-4676-4d9b-bbae-5082f505fc8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2002.123916] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0210d9d4-2161-4b06-bc81-9de361accca6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2002.124199] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8f9a6934-9ded-4561-8d83-aacd4d79f29a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2002.124239] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8a1a6866-1439-4f82-9fda-a7d9a7f211a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2002.124369] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 92b0f3c0-2c87-478d-8b11-f0b05aee12ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2002.124506] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e9994aad-8053-4936-ad4b-5347a1a62f4e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2002.124675] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2002.124812] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2002.219690] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a1c392-c95a-41a7-b7fb-b289abff9542 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.227716] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e817810c-ef98-4373-8d33-a842131636e2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.257133] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-965067a3-049f-4edc-b409-cf69dc9ac01a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.264073] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b501972-c1bf-4552-bd98-4550247d6002 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.276739] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2002.284785] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2002.299653] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2002.299832] 
env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.242s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2003.299908] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2003.299908] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2003.299908] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2003.300414] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2005.981027] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2028.771582] env[68040]: WARNING oslo_vmware.rw_handles [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2028.771582] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2028.771582] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2028.771582] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2028.771582] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2028.771582] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 2028.771582] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2028.771582] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2028.771582] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2028.771582] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2028.771582] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2028.771582] env[68040]: ERROR oslo_vmware.rw_handles [ 2028.772605] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 
221a5bbe-7168-4f5c-ab49-8a149545655f] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/39e778b2-eb22-4b03-9eae-538a762130a1/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2028.774157] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2028.774462] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Copying Virtual Disk [datastore2] vmware_temp/39e778b2-eb22-4b03-9eae-538a762130a1/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/39e778b2-eb22-4b03-9eae-538a762130a1/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2028.774751] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7c4773a6-f01f-44ae-b20c-1a72278a94d9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.781949] env[68040]: DEBUG oslo_vmware.api [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Waiting for the task: (returnval){ [ 2028.781949] env[68040]: value = "task-3200342" [ 2028.781949] env[68040]: _type = "Task" [ 2028.781949] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.789366] env[68040]: DEBUG oslo_vmware.api [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Task: {'id': task-3200342, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.292383] env[68040]: DEBUG oslo_vmware.exceptions [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Fault InvalidArgument not matched. 
{{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2029.292678] env[68040]: DEBUG oslo_concurrency.lockutils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2029.293284] env[68040]: ERROR nova.compute.manager [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2029.293284] env[68040]: Faults: ['InvalidArgument'] [ 2029.293284] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Traceback (most recent call last): [ 2029.293284] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2029.293284] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] yield resources [ 2029.293284] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2029.293284] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] self.driver.spawn(context, instance, image_meta, [ 2029.293284] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2029.293284] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2029.293284] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2029.293284] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] self._fetch_image_if_missing(context, vi) [ 2029.293284] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2029.293675] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] image_cache(vi, tmp_image_ds_loc) [ 2029.293675] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2029.293675] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] vm_util.copy_virtual_disk( [ 2029.293675] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2029.293675] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] session._wait_for_task(vmdk_copy_task) [ 2029.293675] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2029.293675] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] return self.wait_for_task(task_ref) [ 2029.293675] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2029.293675] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] return evt.wait() [ 2029.293675] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2029.293675] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] result = hub.switch() [ 2029.293675] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2029.293675] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] return self.greenlet.switch() [ 2029.294075] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2029.294075] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] self.f(*self.args, **self.kw) [ 2029.294075] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2029.294075] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] raise exceptions.translate_fault(task_info.error) [ 2029.294075] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2029.294075] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Faults: ['InvalidArgument'] [ 2029.294075] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] [ 2029.294075] env[68040]: INFO nova.compute.manager [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Terminating instance [ 2029.295248] env[68040]: DEBUG oslo_concurrency.lockutils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2029.295491] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2029.295732] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1f8b02f-d23b-4152-b60d-17f5f518c236 {{(pid=68040) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.298836] env[68040]: DEBUG nova.compute.manager [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2029.299044] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2029.299767] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6659414-5204-4d44-aded-07008865484e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.306208] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2029.306409] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2105fe5c-09e0-4c1d-b132-f358ae16ce7f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.308492] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2029.308668] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2029.309579] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b53bf4f-d07f-481f-8768-60cf637154ff {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.313947] env[68040]: DEBUG oslo_vmware.api [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for the task: (returnval){ [ 2029.313947] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52ab2c41-0720-e22a-4a58-514f2e4c5b50" [ 2029.313947] env[68040]: _type = "Task" [ 2029.313947] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.321133] env[68040]: DEBUG oslo_vmware.api [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52ab2c41-0720-e22a-4a58-514f2e4c5b50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.366774] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2029.366983] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2029.367180] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Deleting the datastore file [datastore2] 221a5bbe-7168-4f5c-ab49-8a149545655f {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2029.367436] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67fb375e-f80d-4352-9ca9-089a476a1e4c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.374725] env[68040]: DEBUG oslo_vmware.api [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Waiting for the task: (returnval){ [ 2029.374725] env[68040]: value = "task-3200344" [ 2029.374725] env[68040]: _type = "Task" [ 2029.374725] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.381852] env[68040]: DEBUG oslo_vmware.api [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Task: {'id': task-3200344, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.824584] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2029.825026] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Creating directory with path [datastore2] vmware_temp/c8605311-af7e-4d60-a045-c07c95d60f68/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2029.825171] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce4573b9-d5d3-48a3-946d-eff46bc541ab {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.837664] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Created directory with path [datastore2] vmware_temp/c8605311-af7e-4d60-a045-c07c95d60f68/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2029.837816] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Fetch image to [datastore2] vmware_temp/c8605311-af7e-4d60-a045-c07c95d60f68/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2029.837974] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/c8605311-af7e-4d60-a045-c07c95d60f68/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2029.838691] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f10e72-6d89-474e-95b5-95242e07a864 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.845319] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479af123-922d-4c64-a34d-ba4abcaf21ee {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.854336] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6155f7f5-3b98-4e0b-b9b4-7ed1c113222d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.887648] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9721192-9afd-42bb-ae0a-b3bfd7316ae6 {{(pid=68040) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.894390] env[68040]: DEBUG oslo_vmware.api [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Task: {'id': task-3200344, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067866} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.895752] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2029.895946] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2029.896133] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2029.896309] env[68040]: INFO nova.compute.manager [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Took 0.60 seconds to destroy the instance on the hypervisor. 
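The DeleteDatastoreFile_Task exchange above is oslo.vmware's standard fire-and-poll pattern: the SOAP method returns a Task managed object immediately, "Waiting for the task ... to complete" blocks in wait_for_task(), and the "progress is 0%" lines are wait_for_task() re-reading task.info until the task reports success or error. A minimal sketch of the same pattern, with placeholder vCenter credentials and a hypothetical datastore path rather than values from this log:

    from oslo_vmware import api

    # Placeholder connection details -- not the vCenter from this log.
    session = api.VMwareAPISession(
        'vc.example.test', 'svc-user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    vim = session.vim
    file_manager = vim.service_content.fileManager

    # The SOAP call returns a Task moref without waiting for the work.
    task = session.invoke_api(
        vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] example-instance-dir',  # hypothetical path
        datacenter=None)  # optional on ESX; via vCenter pass a Datacenter moref

    # Polls task.info every task_poll_interval seconds and raises a
    # translated VimFaultException on task error -- the InvalidArgument
    # fault earlier in this log surfaced through exactly this path.
    session.wait_for_task(task)
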
[ 2029.898051] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1a5e561c-117b-4cbc-a865-4db339634525 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.900223] env[68040]: DEBUG nova.compute.claims [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2029.900426] env[68040]: DEBUG oslo_concurrency.lockutils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2029.900632] env[68040]: DEBUG oslo_concurrency.lockutils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2029.922581] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2029.982923] env[68040]: DEBUG oslo_vmware.rw_handles [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c8605311-af7e-4d60-a045-c07c95d60f68/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2030.043440] env[68040]: DEBUG oslo_vmware.rw_handles [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2030.043634] env[68040]: DEBUG oslo_vmware.rw_handles [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c8605311-af7e-4d60-a045-c07c95d60f68/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2030.093333] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca5e41d-2e3e-4120-98ad-9347212cde93 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.100616] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61416d08-1c4d-41f1-a91c-f2fcb66bb1f0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.129461] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa26fef-c26f-4e9f-b83c-cf53abba2c25 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.135878] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88039f7-ff49-443f-a4bc-0436a03321c0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.148261] env[68040]: DEBUG nova.compute.provider_tree [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2030.157806] env[68040]: DEBUG nova.scheduler.client.report [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2030.171736] env[68040]: DEBUG oslo_concurrency.lockutils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.271s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2030.172307] env[68040]: ERROR nova.compute.manager [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2030.172307] env[68040]: Faults: ['InvalidArgument'] [ 2030.172307] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Traceback (most recent call last): [ 2030.172307] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/nova/nova/compute/manager.py", line 
2632, in _build_and_run_instance [ 2030.172307] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] self.driver.spawn(context, instance, image_meta, [ 2030.172307] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2030.172307] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2030.172307] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2030.172307] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] self._fetch_image_if_missing(context, vi) [ 2030.172307] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2030.172307] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] image_cache(vi, tmp_image_ds_loc) [ 2030.172307] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2030.172676] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] vm_util.copy_virtual_disk( [ 2030.172676] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2030.172676] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] session._wait_for_task(vmdk_copy_task) [ 2030.172676] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2030.172676] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] return self.wait_for_task(task_ref) [ 2030.172676] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2030.172676] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] return evt.wait() [ 2030.172676] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2030.172676] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] result = hub.switch() [ 2030.172676] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2030.172676] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] return self.greenlet.switch() [ 2030.172676] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2030.172676] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] self.f(*self.args, **self.kw) [ 2030.173060] env[68040]: ERROR nova.compute.manager [instance: 
221a5bbe-7168-4f5c-ab49-8a149545655f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2030.173060] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] raise exceptions.translate_fault(task_info.error) [ 2030.173060] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2030.173060] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Faults: ['InvalidArgument'] [ 2030.173060] env[68040]: ERROR nova.compute.manager [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] [ 2030.173060] env[68040]: DEBUG nova.compute.utils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2030.174439] env[68040]: DEBUG nova.compute.manager [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Build of instance 221a5bbe-7168-4f5c-ab49-8a149545655f was re-scheduled: A specified parameter was not correct: fileType [ 2030.174439] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2030.174813] env[68040]: DEBUG nova.compute.manager [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2030.175026] env[68040]: DEBUG nova.compute.manager [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2030.175268] env[68040]: DEBUG nova.compute.manager [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2030.175465] env[68040]: DEBUG nova.network.neutron [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2030.480984] env[68040]: DEBUG nova.network.neutron [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2030.491800] env[68040]: INFO nova.compute.manager [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Took 0.32 seconds to deallocate network for instance. [ 2030.586804] env[68040]: INFO nova.scheduler.client.report [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Deleted allocations for instance 221a5bbe-7168-4f5c-ab49-8a149545655f [ 2030.608563] env[68040]: DEBUG oslo_concurrency.lockutils [None req-80fddbbd-17e7-45ee-a915-f8b0044ebcac tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Lock "221a5bbe-7168-4f5c-ab49-8a149545655f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 563.415s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2030.608914] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b394bb2b-6266-4e45-8399-7ff3e17e4e9d tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Lock "221a5bbe-7168-4f5c-ab49-8a149545655f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 368.280s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2030.609084] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b394bb2b-6266-4e45-8399-7ff3e17e4e9d tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Acquiring lock "221a5bbe-7168-4f5c-ab49-8a149545655f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2030.609277] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b394bb2b-6266-4e45-8399-7ff3e17e4e9d tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] 
Lock "221a5bbe-7168-4f5c-ab49-8a149545655f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2030.609450] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b394bb2b-6266-4e45-8399-7ff3e17e4e9d tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Lock "221a5bbe-7168-4f5c-ab49-8a149545655f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2030.611442] env[68040]: INFO nova.compute.manager [None req-b394bb2b-6266-4e45-8399-7ff3e17e4e9d tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Terminating instance [ 2030.613145] env[68040]: DEBUG nova.compute.manager [None req-b394bb2b-6266-4e45-8399-7ff3e17e4e9d tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2030.613822] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-b394bb2b-6266-4e45-8399-7ff3e17e4e9d tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2030.613822] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf6e54e8-b914-419c-b793-2ad61013443b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.623022] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f7cc8a-cd55-4fbf-adad-156a23579787 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.651533] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-b394bb2b-6266-4e45-8399-7ff3e17e4e9d tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 221a5bbe-7168-4f5c-ab49-8a149545655f could not be found. [ 2030.651743] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-b394bb2b-6266-4e45-8399-7ff3e17e4e9d tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2030.651932] env[68040]: INFO nova.compute.manager [None req-b394bb2b-6266-4e45-8399-7ff3e17e4e9d tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2030.652206] env[68040]: DEBUG oslo.service.loopingcall [None req-b394bb2b-6266-4e45-8399-7ff3e17e4e9d tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2030.652434] env[68040]: DEBUG nova.compute.manager [-] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2030.652527] env[68040]: DEBUG nova.network.neutron [-] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2030.690332] env[68040]: DEBUG nova.network.neutron [-] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2030.698794] env[68040]: INFO nova.compute.manager [-] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] Took 0.05 seconds to deallocate network for instance. [ 2030.790157] env[68040]: DEBUG oslo_concurrency.lockutils [None req-b394bb2b-6266-4e45-8399-7ff3e17e4e9d tempest-ImagesOneServerNegativeTestJSON-651902863 tempest-ImagesOneServerNegativeTestJSON-651902863-project-member] Lock "221a5bbe-7168-4f5c-ab49-8a149545655f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.181s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2030.791014] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "221a5bbe-7168-4f5c-ab49-8a149545655f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 195.602s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2030.791225] env[68040]: INFO nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 221a5bbe-7168-4f5c-ab49-8a149545655f] During sync_power_state the instance has a pending task (deleting). Skip. 
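The "pending task (deleting). Skip." entry is the standard guard in the power-state sync: the periodic task takes a per-instance lock (the acquire/release pair logged around it), then refuses to touch any instance whose task_state is set, because an in-flight operation owns it. A minimal sketch of the guard, using the real oslo.concurrency decorator but hypothetical helpers:

    from oslo_concurrency import lockutils

    def query_power_state_and_sync(instance):
        # One named lock per instance UUID, like the
        # Lock "221a5bbe-..." acquire/release pair above.
        @lockutils.synchronized(instance.uuid)
        def _sync():
            if instance.task_state is not None:
                # e.g. 'deleting': syncing now could race the
                # in-flight operation, so skip this cycle.
                return
            sync_power_state(instance)  # hypothetical driver query + DB update
        _sync()
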
[ 2030.791410] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "221a5bbe-7168-4f5c-ab49-8a149545655f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2050.983688] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2055.986522] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2059.683619] env[68040]: DEBUG oslo_concurrency.lockutils [None req-9a7c29b1-f776-4e01-b30c-c3de9a2ce7ff tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquiring lock "8a1a6866-1439-4f82-9fda-a7d9a7f211a3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2059.984053] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2059.984053] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2059.984296] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2060.001844] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2060.002019] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2060.002144] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2060.002274] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Skipping network cache update for instance because it is Building. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2060.002473] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2060.002568] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2060.002645] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2060.002764] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2061.984062] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2062.983531] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2062.983759] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2062.995704] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2062.996043] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2062.996091] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2062.996256] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 
2062.997847] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3c7d52-e83e-4b6f-a8ad-f639d86d216f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.006713] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe1a9cf-3548-40d6-a06c-f0c9a2331382 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.020683] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9166b87-38be-4c36-83a7-20e12982dc05 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.027061] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e52ae4-dde8-46aa-9a4a-666761546919 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.055817] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180986MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2063.055956] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2063.056171] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2063.119484] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 031481de-d52f-4f3f-80e5-0d0d6803d624 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2063.119645] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 2e44ead1-4676-4d9b-bbae-5082f505fc8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2063.119778] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0210d9d4-2161-4b06-bc81-9de361accca6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2063.119901] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8f9a6934-9ded-4561-8d83-aacd4d79f29a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2063.120035] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8a1a6866-1439-4f82-9fda-a7d9a7f211a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2063.120163] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 92b0f3c0-2c87-478d-8b11-f0b05aee12ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2063.120283] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e9994aad-8053-4936-ad4b-5347a1a62f4e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2063.120467] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2063.120607] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2063.220645] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a81348-484e-44c4-a42c-a8f1db6c18fb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.227905] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d4705b-eb64-4f2b-84f6-756149c91ff3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.258962] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58fd224d-813e-4751-adb2-95c30ff53b4f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.265904] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b86b14-5768-4f0b-ac09-b4e4a6ceb9cb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.279031] env[68040]: DEBUG nova.compute.provider_tree 
[None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2063.287043] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2063.300802] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2063.300987] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.245s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.301536] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2064.984567] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2064.984766] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2067.979138] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2071.979532] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2078.744404] env[68040]: WARNING oslo_vmware.rw_handles [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2078.744404] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2078.744404] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2078.744404] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2078.744404] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2078.744404] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 2078.744404] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2078.744404] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2078.744404] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2078.744404] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2078.744404] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2078.744404] env[68040]: ERROR oslo_vmware.rw_handles [ 2078.745167] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/c8605311-af7e-4d60-a045-c07c95d60f68/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2078.747289] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2078.747573] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Copying Virtual Disk [datastore2] vmware_temp/c8605311-af7e-4d60-a045-c07c95d60f68/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/c8605311-af7e-4d60-a045-c07c95d60f68/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) 
copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2078.747893] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-66df7872-3a0e-4360-b4e1-06d3ce6f6ed7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.755712] env[68040]: DEBUG oslo_vmware.api [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for the task: (returnval){ [ 2078.755712] env[68040]: value = "task-3200345" [ 2078.755712] env[68040]: _type = "Task" [ 2078.755712] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.763664] env[68040]: DEBUG oslo_vmware.api [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Task: {'id': task-3200345, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.267889] env[68040]: DEBUG oslo_vmware.exceptions [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2079.268226] env[68040]: DEBUG oslo_concurrency.lockutils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2079.268851] env[68040]: ERROR nova.compute.manager [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2079.268851] env[68040]: Faults: ['InvalidArgument'] [ 2079.268851] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Traceback (most recent call last): [ 2079.268851] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2079.268851] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] yield resources [ 2079.268851] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2079.268851] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] self.driver.spawn(context, instance, image_meta, [ 2079.268851] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2079.268851] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2079.268851] env[68040]: ERROR nova.compute.manager [instance: 
031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2079.268851] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] self._fetch_image_if_missing(context, vi) [ 2079.268851] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2079.269267] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] image_cache(vi, tmp_image_ds_loc) [ 2079.269267] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2079.269267] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] vm_util.copy_virtual_disk( [ 2079.269267] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2079.269267] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] session._wait_for_task(vmdk_copy_task) [ 2079.269267] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2079.269267] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] return self.wait_for_task(task_ref) [ 2079.269267] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2079.269267] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] return evt.wait() [ 2079.269267] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2079.269267] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] result = hub.switch() [ 2079.269267] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2079.269267] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] return self.greenlet.switch() [ 2079.269778] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2079.269778] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] self.f(*self.args, **self.kw) [ 2079.269778] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2079.269778] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] raise exceptions.translate_fault(task_info.error) [ 2079.269778] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2079.269778] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Faults: 
['InvalidArgument'] [ 2079.269778] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] [ 2079.269778] env[68040]: INFO nova.compute.manager [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Terminating instance [ 2079.270762] env[68040]: DEBUG oslo_concurrency.lockutils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2079.270979] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2079.271241] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-01156639-95e6-4ea1-9c61-00a85023986e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.273440] env[68040]: DEBUG nova.compute.manager [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2079.273633] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2079.274345] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe3be27-6927-4fcd-a5ef-8a071f94bfa0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.281980] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2079.282245] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e610d2cf-62e0-4cd6-9c4d-2fb694e5796e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.284315] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2079.284491] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2079.285451] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa9c6da9-ebf6-40f2-97d7-e2e979e58aeb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.289828] env[68040]: DEBUG oslo_vmware.api [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Waiting for the task: (returnval){ [ 2079.289828] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5200a6cf-6cda-607c-4e2e-592062591257" [ 2079.289828] env[68040]: _type = "Task" [ 2079.289828] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.296811] env[68040]: DEBUG oslo_vmware.api [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5200a6cf-6cda-607c-4e2e-592062591257, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.356083] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2079.356317] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2079.356502] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Deleting the datastore file [datastore2] 031481de-d52f-4f3f-80e5-0d0d6803d624 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2079.356775] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-283e71e9-fffe-4fdc-8452-1f0f425c2b57 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.362847] env[68040]: DEBUG oslo_vmware.api [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for the task: (returnval){ [ 2079.362847] env[68040]: value = "task-3200347" [ 2079.362847] env[68040]: _type = "Task" [ 2079.362847] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.370158] env[68040]: DEBUG oslo_vmware.api [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Task: {'id': task-3200347, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.800556] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2079.800879] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Creating directory with path [datastore2] vmware_temp/57a725ef-da5c-4200-af40-4f37af5956bf/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2079.801018] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-36489dfd-2fa3-4b42-bd64-20d2401d6a00 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.812615] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Created directory with path [datastore2] vmware_temp/57a725ef-da5c-4200-af40-4f37af5956bf/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2079.812821] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Fetch image to [datastore2] vmware_temp/57a725ef-da5c-4200-af40-4f37af5956bf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2079.813029] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/57a725ef-da5c-4200-af40-4f37af5956bf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2079.813747] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-654087f4-55be-4629-84e7-2fcf2917d296 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.819943] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4369f668-8688-453d-bffd-b6af6b87997f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.828609] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c198cfd-b888-4930-ae40-e463f99b75cc {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.858249] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47162462-e4a7-4649-98b5-0786bea71dd3 {{(pid=68040) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.866222] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-02e29ef3-bfd8-4f51-9926-3ac89c2b5250 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.871916] env[68040]: DEBUG oslo_vmware.api [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Task: {'id': task-3200347, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083878} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2079.872159] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2079.872341] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2079.872518] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2079.872695] env[68040]: INFO nova.compute.manager [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Took 0.60 seconds to destroy the instance on the hypervisor. 
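The "Waiting for the task ... to complete" / "progress is 0%" / "completed successfully" triples above are oslo.vmware's task polling: the driver invokes a vSphere *_Task method, gets back a task moref such as "task-3200347", and polls its TaskInfo until it succeeds or fails. A minimal sketch of that pattern using oslo.vmware's public session API; the host, credentials, datastore path, and datacenter lookup are placeholders, and a reachable vCenter is assumed (this is not the Nova driver's code):

from oslo_vmware import api, vim_util

session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

# Grab the first datacenter moref via the bundled vim_util helpers.
results = session.invoke_api(vim_util, 'get_objects', session.vim,
                             'Datacenter', 100)
dc_ref = results.objects[0].obj

# FileManager.DeleteDatastoreFile_Task returns a task moref;
# wait_for_task() polls TaskInfo (producing the "progress is N%" lines)
# and raises a translated fault if the task ends in error.
task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                          session.vim.service_content.fileManager,
                          name='[datastore2] vmware_temp/stale-dir',
                          datacenter=dc_ref)
session.wait_for_task(task)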
[ 2079.875418] env[68040]: DEBUG nova.compute.claims [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2079.875692] env[68040]: DEBUG oslo_concurrency.lockutils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2079.876053] env[68040]: DEBUG oslo_concurrency.lockutils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2079.886326] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2080.013844] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf492523-4f41-4d71-a5d9-36280cefc799 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.017270] env[68040]: DEBUG oslo_vmware.rw_handles [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/57a725ef-da5c-4200-af40-4f37af5956bf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2080.075462] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73130d2d-691f-4995-89a4-8aa51237281e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.081770] env[68040]: DEBUG oslo_vmware.rw_handles [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2080.081937] env[68040]: DEBUG oslo_vmware.rw_handles [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/57a725ef-da5c-4200-af40-4f37af5956bf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2080.107443] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f05d307-04d6-4ea7-9d8e-638876e86d9c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.114438] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac311218-e6c8-4a0c-8d5c-bbed96e65918 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.127230] env[68040]: DEBUG nova.compute.provider_tree [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2080.135806] env[68040]: DEBUG nova.scheduler.client.report [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2080.150676] env[68040]: DEBUG oslo_concurrency.lockutils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.275s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.151337] env[68040]: ERROR nova.compute.manager [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2080.151337] env[68040]: Faults: ['InvalidArgument'] [ 2080.151337] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Traceback (most recent call last): [ 2080.151337] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2080.151337] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] self.driver.spawn(context, instance, image_meta, [ 2080.151337] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2080.151337] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2080.151337] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2080.151337] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] self._fetch_image_if_missing(context, vi) [ 2080.151337] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2080.151337] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] image_cache(vi, tmp_image_ds_loc) [ 2080.151337] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2080.152150] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] vm_util.copy_virtual_disk( [ 2080.152150] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2080.152150] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] session._wait_for_task(vmdk_copy_task) [ 2080.152150] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2080.152150] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] return self.wait_for_task(task_ref) [ 2080.152150] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2080.152150] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] return evt.wait() [ 2080.152150] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2080.152150] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] result = hub.switch() [ 2080.152150] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2080.152150] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] return self.greenlet.switch() [ 2080.152150] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2080.152150] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] self.f(*self.args, **self.kw) [ 2080.153444] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2080.153444] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] raise exceptions.translate_fault(task_info.error) [ 2080.153444] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2080.153444] env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Faults: ['InvalidArgument'] [ 2080.153444] 
env[68040]: ERROR nova.compute.manager [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] [ 2080.153444] env[68040]: DEBUG nova.compute.utils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2080.154767] env[68040]: DEBUG nova.compute.manager [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Build of instance 031481de-d52f-4f3f-80e5-0d0d6803d624 was re-scheduled: A specified parameter was not correct: fileType [ 2080.154767] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2080.155253] env[68040]: DEBUG nova.compute.manager [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2080.155448] env[68040]: DEBUG nova.compute.manager [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2080.155649] env[68040]: DEBUG nova.compute.manager [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2080.155816] env[68040]: DEBUG nova.network.neutron [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2080.463499] env[68040]: DEBUG nova.network.neutron [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2080.478554] env[68040]: INFO nova.compute.manager [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Took 0.32 seconds to deallocate network for instance. 
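The sequence above, where spawn() fails with InvalidArgument, the resource claim is aborted, the network is deallocated, and the build "was re-scheduled", follows Nova's build-retry shape. A self-contained schematic of that control flow with stand-in names; it is not Nova's literal code:

class RescheduledException(Exception):
    """Stand-in for nova.exception.RescheduledException."""

def spawn(instance):
    # Stand-in for VMwareVCDriver.spawn(); in the log it failed with
    # "A specified parameter was not correct: fileType".
    raise RuntimeError("InvalidArgument: fileType")

def abort_claim(instance):
    print(f"abort resource claim for {instance}")   # "Aborting claim:"

def deallocate_network(instance):
    print(f"deallocate network for {instance}")     # "Deallocating network"

def build_and_run_instance(instance):
    try:
        spawn(instance)
    except Exception as exc:
        abort_claim(instance)
        deallocate_network(instance)
        # Re-raising as a reschedule lets the conductor retry the build,
        # potentially on another host.
        raise RescheduledException(str(exc))

try:
    build_and_run_instance("031481de-d52f-4f3f-80e5-0d0d6803d624")
except RescheduledException as e:
    print(f"re-scheduled: {e}")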
[ 2080.580034] env[68040]: INFO nova.scheduler.client.report [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Deleted allocations for instance 031481de-d52f-4f3f-80e5-0d0d6803d624 [ 2080.601457] env[68040]: DEBUG oslo_concurrency.lockutils [None req-da4ba923-bb80-4a34-b852-b727a65f104a tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "031481de-d52f-4f3f-80e5-0d0d6803d624" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 585.744s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.601731] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6815c47a-688c-443e-baec-05b40836ccf9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "031481de-d52f-4f3f-80e5-0d0d6803d624" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 389.394s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2080.601953] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6815c47a-688c-443e-baec-05b40836ccf9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "031481de-d52f-4f3f-80e5-0d0d6803d624-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2080.602184] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6815c47a-688c-443e-baec-05b40836ccf9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "031481de-d52f-4f3f-80e5-0d0d6803d624-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2080.602353] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6815c47a-688c-443e-baec-05b40836ccf9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "031481de-d52f-4f3f-80e5-0d0d6803d624-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.604594] env[68040]: INFO nova.compute.manager [None req-6815c47a-688c-443e-baec-05b40836ccf9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Terminating instance [ 2080.607034] env[68040]: DEBUG nova.compute.manager [None req-6815c47a-688c-443e-baec-05b40836ccf9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Start destroying the instance on the hypervisor. 
{{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2080.607034] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-6815c47a-688c-443e-baec-05b40836ccf9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2080.607190] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9ed0a5d6-1ec0-421d-80f5-58479b78704f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.615981] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c627fc7f-5648-4ea2-b68f-51e622cd3ec7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.644439] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-6815c47a-688c-443e-baec-05b40836ccf9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 031481de-d52f-4f3f-80e5-0d0d6803d624 could not be found. [ 2080.644633] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-6815c47a-688c-443e-baec-05b40836ccf9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2080.644807] env[68040]: INFO nova.compute.manager [None req-6815c47a-688c-443e-baec-05b40836ccf9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2080.645118] env[68040]: DEBUG oslo.service.loopingcall [None req-6815c47a-688c-443e-baec-05b40836ccf9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2080.645340] env[68040]: DEBUG nova.compute.manager [-] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2080.645437] env[68040]: DEBUG nova.network.neutron [-] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2080.667040] env[68040]: DEBUG nova.network.neutron [-] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2080.674384] env[68040]: INFO nova.compute.manager [-] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] Took 0.03 seconds to deallocate network for instance. 
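The "Waiting for function ..._try_deallocate_network.<locals>._deallocate_network_with_retries to return." line above is logged by a retry wrapper in oslo.service's loopingcall module, which re-invokes a nested function whenever it raises one of the listed exceptions; the nested function is again what puts "<locals>" into the qualname. A minimal sketch, assuming oslo.service's RetryDecorator and illustrative retry values rather than Nova's actual settings:

from oslo_service import loopingcall

def try_deallocate_network():
    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=10,
                                exceptions=(ConnectionError,))
    def _deallocate_network_with_retries():
        # The call to Neutron would go here; raising ConnectionError
        # makes the decorator sleep and invoke the function again,
        # up to max_retry_count times.
        return "deallocated"
    return _deallocate_network_with_retries()

print(try_deallocate_network())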
[ 2080.758745] env[68040]: DEBUG oslo_concurrency.lockutils [None req-6815c47a-688c-443e-baec-05b40836ccf9 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "031481de-d52f-4f3f-80e5-0d0d6803d624" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.157s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.759564] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "031481de-d52f-4f3f-80e5-0d0d6803d624" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 245.570s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2080.759757] env[68040]: INFO nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 031481de-d52f-4f3f-80e5-0d0d6803d624] During sync_power_state the instance has a pending task (deleting). Skip. [ 2080.759934] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "031481de-d52f-4f3f-80e5-0d0d6803d624" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2084.315181] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ccc7789-ba86-465f-a4d5-8145aeca0310 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "92b0f3c0-2c87-478d-8b11-f0b05aee12ed" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2110.983984] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2115.987632] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2120.984598] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2120.984916] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2120.984916] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2121.004328] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Skipping network cache update for instance because it is Building. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2121.004489] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2121.004626] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2121.004754] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2121.004882] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2121.005011] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2121.005144] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2122.984408] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2123.984225] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2123.984474] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2123.984762] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2123.984854] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Cleaning up deleted instances with incomplete migration {{(pid=68040) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 2124.993275] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2125.004307] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2125.004609] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2125.004786] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2125.004944] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2125.006135] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2c0f7b-1a64-4651-b116-fdeb80215eca {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.015215] env[68040]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7518ec5-8599-4b75-83a1-cfc220da7647 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.029946] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809118d8-8590-4167-91bd-29672f59b90a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.035969] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0909e60a-657e-42ea-9bdc-b98ddc3bb8a2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.064619] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180995MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2125.064789] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2125.064959] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2125.157042] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 2e44ead1-4676-4d9b-bbae-5082f505fc8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2125.157174] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 0210d9d4-2161-4b06-bc81-9de361accca6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2125.157312] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8f9a6934-9ded-4561-8d83-aacd4d79f29a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2125.157439] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8a1a6866-1439-4f82-9fda-a7d9a7f211a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2125.157561] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 92b0f3c0-2c87-478d-8b11-f0b05aee12ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2125.157681] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e9994aad-8053-4936-ad4b-5347a1a62f4e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2125.157872] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2125.158024] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2125.231375] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ea3f36-79ac-4c60-82ca-a132bf723946 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.239175] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97f9c78d-ce7c-4569-b8ba-74b28a93e430 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.268902] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878618d4-d4b5-4cc6-9c1a-8878bebdfb4b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.275751] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66bc5d0f-ea18-4714-a07a-53a0639e5738 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.288490] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2125.296314] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2125.311010] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2125.311207] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.246s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2127.303141] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2127.303494] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2128.366013] env[68040]: WARNING oslo_vmware.rw_handles [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2128.366013] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2128.366013] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2128.366013] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2128.366013] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2128.366013] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 2128.366013] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2128.366013] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2128.366013] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2128.366013] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2128.366013] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2128.366013] env[68040]: ERROR oslo_vmware.rw_handles [ 2128.366871] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/57a725ef-da5c-4200-af40-4f37af5956bf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2128.368463] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] 
Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2128.368730] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Copying Virtual Disk [datastore2] vmware_temp/57a725ef-da5c-4200-af40-4f37af5956bf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/57a725ef-da5c-4200-af40-4f37af5956bf/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2128.369015] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea571b0c-1275-4b4b-afd0-2d7aa97583db {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.377377] env[68040]: DEBUG oslo_vmware.api [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Waiting for the task: (returnval){ [ 2128.377377] env[68040]: value = "task-3200348" [ 2128.377377] env[68040]: _type = "Task" [ 2128.377377] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.385345] env[68040]: DEBUG oslo_vmware.api [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Task: {'id': task-3200348, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.889491] env[68040]: DEBUG oslo_vmware.exceptions [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Fault InvalidArgument not matched. 
{{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2128.889822] env[68040]: DEBUG oslo_concurrency.lockutils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2128.890412] env[68040]: ERROR nova.compute.manager [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2128.890412] env[68040]: Faults: ['InvalidArgument'] [ 2128.890412] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Traceback (most recent call last): [ 2128.890412] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2128.890412] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] yield resources [ 2128.890412] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2128.890412] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] self.driver.spawn(context, instance, image_meta, [ 2128.890412] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2128.890412] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2128.890412] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2128.890412] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] self._fetch_image_if_missing(context, vi) [ 2128.890412] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2128.890412] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] image_cache(vi, tmp_image_ds_loc) [ 2128.890868] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2128.890868] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] vm_util.copy_virtual_disk( [ 2128.890868] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2128.890868] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] session._wait_for_task(vmdk_copy_task) [ 2128.890868] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task 
[ 2128.890868] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] return self.wait_for_task(task_ref) [ 2128.890868] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2128.890868] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] return evt.wait() [ 2128.890868] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2128.890868] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] result = hub.switch() [ 2128.890868] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2128.890868] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] return self.greenlet.switch() [ 2128.890868] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2128.891352] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] self.f(*self.args, **self.kw) [ 2128.891352] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2128.891352] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] raise exceptions.translate_fault(task_info.error) [ 2128.891352] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2128.891352] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Faults: ['InvalidArgument'] [ 2128.891352] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] [ 2128.891352] env[68040]: INFO nova.compute.manager [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Terminating instance [ 2128.892712] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2128.892936] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2128.893195] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-02579ce8-5a09-44d6-8a15-1217e6ed339c {{(pid=68040) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.895562] env[68040]: DEBUG nova.compute.manager [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2128.895762] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2128.896528] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a04bcb06-6ef0-425c-b75c-feecdbf0dd17 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.903797] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2128.904032] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c3a6a6fe-042b-4106-a3cf-543e0fbc5769 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.906888] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2128.907093] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2128.907759] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fb550ba-6c72-4ec9-9721-dfbb42972559 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.913589] env[68040]: DEBUG oslo_vmware.api [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for the task: (returnval){ [ 2128.913589] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52047eac-24a9-8117-94a6-2f1d91ed672e" [ 2128.913589] env[68040]: _type = "Task" [ 2128.913589] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.921243] env[68040]: DEBUG oslo_vmware.api [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52047eac-24a9-8117-94a6-2f1d91ed672e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.972970] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2128.973350] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2128.973652] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Deleting the datastore file [datastore2] 2e44ead1-4676-4d9b-bbae-5082f505fc8b {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2128.974049] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1be1d320-a435-4c1c-b960-2017306ad1e1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.982024] env[68040]: DEBUG oslo_vmware.api [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Waiting for the task: (returnval){ [ 2128.982024] env[68040]: value = "task-3200350" [ 2128.982024] env[68040]: _type = "Task" [ 2128.982024] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.993191] env[68040]: DEBUG oslo_vmware.api [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Task: {'id': task-3200350, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.423985] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2129.424380] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Creating directory with path [datastore2] vmware_temp/bc6d8273-0297-4808-b1b2-f9d52a5e08cf/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2129.424539] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dba470b6-5e45-424d-a662-4c57bbeb440b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.436308] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Created directory with path [datastore2] vmware_temp/bc6d8273-0297-4808-b1b2-f9d52a5e08cf/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2129.436548] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Fetch image to [datastore2] vmware_temp/bc6d8273-0297-4808-b1b2-f9d52a5e08cf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2129.436731] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/bc6d8273-0297-4808-b1b2-f9d52a5e08cf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2129.437532] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ecc540-a410-471b-be12-0815feb220a6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.444742] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0b8bc8-c755-4dbb-bc1c-b88d2287f8f2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.454030] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2865cb36-03e7-40e7-aac2-b9882e01cddb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.489210] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3247439-11da-4499-a873-5e1802622c81 
{{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.496686] env[68040]: DEBUG oslo_vmware.api [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Task: {'id': task-3200350, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082315} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2129.498229] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2129.498435] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2129.498604] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2129.498782] env[68040]: INFO nova.compute.manager [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Took 0.60 seconds to destroy the instance on the hypervisor. 
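The DeleteDatastoreFile_Task above is driven by the usual poll loop: the API reports "progress is 0%." until the task flips to success, and then logs the duration. A rough, self-contained sketch of that loop; FakeTask and this wait_for_task are a simulation under assumed shapes, not oslo.vmware's actual API:

import time

class TaskFailed(Exception):
    """Raised when the polled task ends in an error state."""

def wait_for_task(task, poll_interval=0.5):
    # Poll until the task leaves the "running" state, echoing the
    # "progress is N%." and "completed successfully" records above.
    start = time.monotonic()
    while True:
        state, progress, error = task.info()
        if state == "running":
            print(f"Task: {task.task_id} progress is {progress}%.")
            time.sleep(poll_interval)
        elif state == "success":
            print(f"Task: {task.task_id} completed successfully. "
                  f"duration_secs: {time.monotonic() - start:.6f}")
            return
        else:
            raise TaskFailed(error)

class FakeTask:
    task_id = "task-3200350"
    def __init__(self):
        self._polls = 0
    def info(self):
        # First poll reports 0% progress, second poll reports success.
        self._polls += 1
        if self._polls < 2:
            return ("running", 0, None)
        return ("success", 100, None)

wait_for_task(FakeTask(), poll_interval=0.1)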
[ 2129.500665] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-abc128f5-20b4-4b54-a2b8-c8a2247cd8e2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.502640] env[68040]: DEBUG nova.compute.claims [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2129.502814] env[68040]: DEBUG oslo_concurrency.lockutils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2129.503048] env[68040]: DEBUG oslo_concurrency.lockutils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2129.525199] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2129.585033] env[68040]: DEBUG oslo_vmware.rw_handles [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bc6d8273-0297-4808-b1b2-f9d52a5e08cf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2129.681784] env[68040]: DEBUG oslo_vmware.rw_handles [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2129.681996] env[68040]: DEBUG oslo_vmware.rw_handles [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bc6d8273-0297-4808-b1b2-f9d52a5e08cf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2129.725920] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b96ce61-04e3-490f-92b7-1fd930621e4d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.737186] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98507c85-52d7-43ee-9d18-26e350238f40 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.787400] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd51163-28e4-4c2a-b3a0-d1d74cc446ee {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.798246] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218c0d09-0da7-44a6-b6ec-40e18e69560a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.520747] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2130.530941] env[68040]: DEBUG nova.compute.provider_tree [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2130.539373] env[68040]: DEBUG nova.scheduler.client.report [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2130.552257] env[68040]: DEBUG oslo_concurrency.lockutils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.049s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2130.552769] env[68040]: ERROR nova.compute.manager [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2130.552769] env[68040]: Faults: ['InvalidArgument'] [ 2130.552769] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Traceback (most recent 
call last): [ 2130.552769] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2130.552769] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] self.driver.spawn(context, instance, image_meta, [ 2130.552769] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2130.552769] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2130.552769] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2130.552769] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] self._fetch_image_if_missing(context, vi) [ 2130.552769] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2130.552769] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] image_cache(vi, tmp_image_ds_loc) [ 2130.552769] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2130.553184] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] vm_util.copy_virtual_disk( [ 2130.553184] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2130.553184] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] session._wait_for_task(vmdk_copy_task) [ 2130.553184] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2130.553184] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] return self.wait_for_task(task_ref) [ 2130.553184] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2130.553184] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] return evt.wait() [ 2130.553184] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2130.553184] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] result = hub.switch() [ 2130.553184] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2130.553184] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] return self.greenlet.switch() [ 2130.553184] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2130.553184] env[68040]: ERROR 
nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] self.f(*self.args, **self.kw) [ 2130.553584] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2130.553584] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] raise exceptions.translate_fault(task_info.error) [ 2130.553584] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2130.553584] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Faults: ['InvalidArgument'] [ 2130.553584] env[68040]: ERROR nova.compute.manager [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] [ 2130.553584] env[68040]: DEBUG nova.compute.utils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2130.554992] env[68040]: DEBUG nova.compute.manager [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Build of instance 2e44ead1-4676-4d9b-bbae-5082f505fc8b was re-scheduled: A specified parameter was not correct: fileType [ 2130.554992] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2130.555382] env[68040]: DEBUG nova.compute.manager [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2130.555556] env[68040]: DEBUG nova.compute.manager [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2130.555726] env[68040]: DEBUG nova.compute.manager [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2130.555898] env[68040]: DEBUG nova.network.neutron [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2130.869403] env[68040]: DEBUG nova.network.neutron [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2130.880215] env[68040]: INFO nova.compute.manager [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Took 0.32 seconds to deallocate network for instance. [ 2130.982626] env[68040]: INFO nova.scheduler.client.report [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Deleted allocations for instance 2e44ead1-4676-4d9b-bbae-5082f505fc8b [ 2130.989120] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2130.989120] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Cleaning up deleted instances {{(pid=68040) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 2130.999386] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] There are 0 instances to clean {{(pid=68040) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 2131.013545] env[68040]: DEBUG oslo_concurrency.lockutils [None req-386790f9-b6c5-4efb-baa4-da55c64e048b tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "2e44ead1-4676-4d9b-bbae-5082f505fc8b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 624.994s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2131.013834] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e1fee5c4-78a7-4d1f-b4c8-ea66baa8d6ba tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "2e44ead1-4676-4d9b-bbae-5082f505fc8b" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 429.336s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2131.014132] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e1fee5c4-78a7-4d1f-b4c8-ea66baa8d6ba tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member]
Acquiring lock "2e44ead1-4676-4d9b-bbae-5082f505fc8b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2131.014305] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e1fee5c4-78a7-4d1f-b4c8-ea66baa8d6ba tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "2e44ead1-4676-4d9b-bbae-5082f505fc8b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2131.014479] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e1fee5c4-78a7-4d1f-b4c8-ea66baa8d6ba tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "2e44ead1-4676-4d9b-bbae-5082f505fc8b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2131.016992] env[68040]: INFO nova.compute.manager [None req-e1fee5c4-78a7-4d1f-b4c8-ea66baa8d6ba tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Terminating instance [ 2131.018631] env[68040]: DEBUG nova.compute.manager [None req-e1fee5c4-78a7-4d1f-b4c8-ea66baa8d6ba tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2131.018827] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e1fee5c4-78a7-4d1f-b4c8-ea66baa8d6ba tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2131.019298] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f6141901-7470-4330-89d5-bf69671f4529 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.029024] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e07458-923f-4c85-a0b1-7e4f32152cfd {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2131.054228] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-e1fee5c4-78a7-4d1f-b4c8-ea66baa8d6ba tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2e44ead1-4676-4d9b-bbae-5082f505fc8b could not be found.
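Throughout this run, lockutils annotates every per-UUID and per-events lock with how long the caller waited and how long it held the lock (above, do_terminate_instance waited 429.336s while the stuck build held the instance lock for 624.994s). A minimal sketch of that wait/held bookkeeping; timed_lock is a hypothetical helper, not oslo.concurrency's implementation:

import threading
import time
from contextlib import contextmanager

_locks = {}  # lock name -> threading.Lock

@contextmanager
def timed_lock(name, owner):
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    # Wait time covers only the acquire; hold time starts afterwards.
    print(f'Lock "{name}" acquired by "{owner}" :: '
          f'waited {time.monotonic() - t0:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: '
              f'held {time.monotonic() - t1:.3f}s')

with timed_lock("2e44ead1-4676-4d9b-bbae-5082f505fc8b",
                "ComputeManager.terminate_instance.<locals>.do_terminate_instance"):
    pass  # critical section: destroy + deallocate would run here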
[ 2131.054425] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-e1fee5c4-78a7-4d1f-b4c8-ea66baa8d6ba tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2131.054622] env[68040]: INFO nova.compute.manager [None req-e1fee5c4-78a7-4d1f-b4c8-ea66baa8d6ba tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2131.054887] env[68040]: DEBUG oslo.service.loopingcall [None req-e1fee5c4-78a7-4d1f-b4c8-ea66baa8d6ba tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2131.055353] env[68040]: DEBUG nova.compute.manager [-] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2131.055455] env[68040]: DEBUG nova.network.neutron [-] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2131.078824] env[68040]: DEBUG nova.network.neutron [-] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2131.086822] env[68040]: INFO nova.compute.manager [-] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] Took 0.03 seconds to deallocate network for instance. [ 2131.178740] env[68040]: DEBUG oslo_concurrency.lockutils [None req-e1fee5c4-78a7-4d1f-b4c8-ea66baa8d6ba tempest-ImagesTestJSON-969633667 tempest-ImagesTestJSON-969633667-project-member] Lock "2e44ead1-4676-4d9b-bbae-5082f505fc8b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.165s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2131.179548] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "2e44ead1-4676-4d9b-bbae-5082f505fc8b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 295.990s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2131.180056] env[68040]: INFO nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 2e44ead1-4676-4d9b-bbae-5082f505fc8b] During sync_power_state the instance has a pending task (deleting). Skip.
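The _sync_power_states records show the guard that produced the "pending task (deleting). Skip." line: under the per-instance lock, the periodic sync refuses to reconcile power state while another operation still owns the instance. A sketch of that rule, assuming a simplified Instance shape rather than Nova's object model:

from dataclasses import dataclass

@dataclass
class Instance:
    uuid: str
    task_state: str | None
    power_state: str

def query_driver_power_state_and_sync(driver_state, instance):
    # An in-flight task (e.g. "deleting") wins: the sync backs off rather
    # than racing the operation that is mutating the instance.
    if instance.task_state is not None:
        print(f"[instance: {instance.uuid}] During sync_power_state the "
              f"instance has a pending task ({instance.task_state}). Skip.")
        return
    if instance.power_state != driver_state:
        instance.power_state = driver_state  # reconcile the DB view

inst = Instance(uuid="2e44ead1-4676-4d9b-bbae-5082f505fc8b",
                task_state="deleting", power_state="running")
query_driver_power_state_and_sync("shutdown", inst)  # prints the Skip line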
[ 2131.180261] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "2e44ead1-4676-4d9b-bbae-5082f505fc8b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2133.984236] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2170.993543] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2177.985600] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2178.387272] env[68040]: WARNING oslo_vmware.rw_handles [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2178.387272] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2178.387272] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2178.387272] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2178.387272] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2178.387272] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 2178.387272] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2178.387272] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2178.387272] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2178.387272] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2178.387272] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2178.387272] env[68040]: ERROR oslo_vmware.rw_handles [ 2178.387946] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/bc6d8273-0297-4808-b1b2-f9d52a5e08cf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2178.389915] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] 
[instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2178.390199] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Copying Virtual Disk [datastore2] vmware_temp/bc6d8273-0297-4808-b1b2-f9d52a5e08cf/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/bc6d8273-0297-4808-b1b2-f9d52a5e08cf/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2178.390485] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc5d8ac3-37b6-4c28-be0f-b6ab33b55b46 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.398762] env[68040]: DEBUG oslo_vmware.api [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for the task: (returnval){ [ 2178.398762] env[68040]: value = "task-3200351" [ 2178.398762] env[68040]: _type = "Task" [ 2178.398762] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2178.406492] env[68040]: DEBUG oslo_vmware.api [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Task: {'id': task-3200351, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2178.909336] env[68040]: DEBUG oslo_vmware.exceptions [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Fault InvalidArgument not matched. 
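Annotation: the copy-then-wait sequence above (invoke CopyVirtualDisk_Task, then poll until the task finishes) maps onto the public oslo.vmware session API. A hedged sketch; the vCenter address, credentials, and datastore paths are placeholders, not this deployment's values:

    from oslo_vmware import api

    session = api.VMwareAPISession("vcenter.example.org", "user", "secret",
                                   10,    # api_retry_count
                                   0.5)   # task_poll_interval, in seconds

    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, "CopyVirtualDisk_Task", disk_mgr,
        sourceName="[datastore2] vmware_temp/example/tmp-sparse.vmdk",
        destName="[datastore2] vmware_temp/example/example.vmdk")
    # Polls task.info (the "progress is 0%" lines above) and raises a
    # translated exception if the task ends in error.
    session.wait_for_task(task)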
{{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2178.909601] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2178.910145] env[68040]: ERROR nova.compute.manager [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2178.910145] env[68040]: Faults: ['InvalidArgument'] [ 2178.910145] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Traceback (most recent call last): [ 2178.910145] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2178.910145] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] yield resources [ 2178.910145] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2178.910145] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] self.driver.spawn(context, instance, image_meta, [ 2178.910145] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2178.910145] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2178.910145] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2178.910145] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] self._fetch_image_if_missing(context, vi) [ 2178.910145] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2178.910542] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] image_cache(vi, tmp_image_ds_loc) [ 2178.910542] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2178.910542] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] vm_util.copy_virtual_disk( [ 2178.910542] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2178.910542] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] session._wait_for_task(vmdk_copy_task) [ 2178.910542] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2178.910542] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] return self.wait_for_task(task_ref) [ 2178.910542] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2178.910542] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] return evt.wait() [ 2178.910542] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2178.910542] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] result = hub.switch() [ 2178.910542] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2178.910542] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] return self.greenlet.switch() [ 2178.910921] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2178.910921] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] self.f(*self.args, **self.kw) [ 2178.910921] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2178.910921] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] raise exceptions.translate_fault(task_info.error) [ 2178.910921] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2178.910921] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Faults: ['InvalidArgument'] [ 2178.910921] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] [ 2178.910921] env[68040]: INFO nova.compute.manager [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Terminating instance [ 2178.911996] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2178.912235] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2178.912477] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6364e2eb-1562-4d3e-b413-56f74df2470e 
{{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.914546] env[68040]: DEBUG nova.compute.manager [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2178.914737] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2178.915444] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d1b8d21-b865-4d50-b88d-f96907397d04 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.921950] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2178.922166] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0c484a66-578b-4fc3-83ab-9e250dd089b0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.924147] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2178.924326] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2178.925251] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-233258cd-6cfe-462f-bccb-4dfefff14d0e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.929679] env[68040]: DEBUG oslo_vmware.api [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Waiting for the task: (returnval){ [ 2178.929679] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]525cafa5-cf03-2222-c18d-e86fa4220cf7" [ 2178.929679] env[68040]: _type = "Task" [ 2178.929679] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2178.937279] env[68040]: DEBUG oslo_vmware.api [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]525cafa5-cf03-2222-c18d-e86fa4220cf7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2178.987554] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2178.987991] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2178.987991] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Deleting the datastore file [datastore2] 0210d9d4-2161-4b06-bc81-9de361accca6 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2178.988225] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0854ae63-857f-4b72-a4f3-6f02078c2fed {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.993893] env[68040]: DEBUG oslo_vmware.api [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for the task: (returnval){ [ 2178.993893] env[68040]: value = "task-3200353" [ 2178.993893] env[68040]: _type = "Task" [ 2178.993893] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2179.001159] env[68040]: DEBUG oslo_vmware.api [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Task: {'id': task-3200353, 'name': DeleteDatastoreFile_Task} progress is 0%. 
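Annotation: the datastore housekeeping interleaved above (MakeDirectory for the image cache, DeleteDatastoreFile_Task for the destroyed instance's folder) all goes through the vSphere FileManager. A minimal sketch via session.invoke_api; dc_ref stands in for a real Datacenter managed-object reference:

    def cleanup_datastore(session, dc_ref):
        # session as in the VMwareAPISession sketch earlier
        file_mgr = session.vim.service_content.fileManager

        session.invoke_api(
            session.vim, "MakeDirectory", file_mgr,
            name="[datastore2] devstack-image-cache_base",
            datacenter=dc_ref, createParentDirectories=True)

        task = session.invoke_api(
            session.vim, "DeleteDatastoreFile_Task", file_mgr,
            name="[datastore2] 0210d9d4-2161-4b06-bc81-9de361accca6",
            datacenter=dc_ref)
        session.wait_for_task(task)  # the DeleteDatastoreFile_Task polled above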
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2179.439849] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2179.440114] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Creating directory with path [datastore2] vmware_temp/d4394c8c-4d18-48c2-b540-be2131a9076f/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2179.440355] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d21c3b98-1f7c-4306-b667-43fa75087438 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.451794] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Created directory with path [datastore2] vmware_temp/d4394c8c-4d18-48c2-b540-be2131a9076f/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2179.451998] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Fetch image to [datastore2] vmware_temp/d4394c8c-4d18-48c2-b540-be2131a9076f/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2179.452154] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/d4394c8c-4d18-48c2-b540-be2131a9076f/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2179.452880] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff4a83c2-c68f-4d48-80d9-40f2c94a4c63 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.459286] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a71366-41dd-4ea9-ad63-e82121c7134b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.468090] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d25c8e-3c42-46db-8357-693b8798b3e5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.500224] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-743da92c-f064-4d85-88fc-e37dbcc76015 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.507061] env[68040]: DEBUG oslo_vmware.api [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Task: {'id': task-3200353, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074069} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2179.508373] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2179.508563] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2179.508733] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2179.508909] env[68040]: INFO nova.compute.manager [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Took 0.59 seconds to destroy the instance on the hypervisor. 
[ 2179.510628] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8ebd5f4c-14c1-490e-aacf-386bc8be51c0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.512396] env[68040]: DEBUG nova.compute.claims [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2179.512581] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.512786] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.532903] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2179.588743] env[68040]: DEBUG oslo_vmware.rw_handles [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d4394c8c-4d18-48c2-b540-be2131a9076f/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2179.652871] env[68040]: DEBUG oslo_vmware.rw_handles [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2179.653075] env[68040]: DEBUG oslo_vmware.rw_handles [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d4394c8c-4d18-48c2-b540-be2131a9076f/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
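Annotation: the "Creating HTTP connection to write to file with size = 21318656" line is oslo.vmware's write handle streaming image bytes to the ESX "/folder" HTTP endpoint. A sketch under assumed placeholder host and cookie values; only the file size is taken from the log:

    from oslo_vmware import rw_handles

    handle = rw_handles.FileWriteHandle(
        "esx.example.org", 443,          # host/port placeholders
        "ha-datacenter", "datastore2",
        [],                              # session cookies in real use
        "vmware_temp/example/tmp-sparse.vmdk",
        21318656)                        # file size from the log line above
    handle.write(b"\x00" * 4096)         # real code streams Glance image chunks
    handle.close()  # close() calls getresponse(); if the host drops the
                    # connection first, the RemoteDisconnected warning appears

This is why the RemoteDisconnected warnings in this log are followed by a normal "Downloaded image file data ..." message: the upload itself completed, only the final response read failed.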
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2179.684812] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da365ea5-1a54-4394-8f42-e663ca8ebc8b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.692709] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d76d0539-757a-4534-b775-c44496cc98bf {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.722334] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517953cc-c749-4ae1-9fb3-73b0da2d0f48 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.729009] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da19348b-e392-4555-8b6f-a2806b34a54b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.741469] env[68040]: DEBUG nova.compute.provider_tree [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2179.749644] env[68040]: DEBUG nova.scheduler.client.report [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2179.763902] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.251s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2179.764429] env[68040]: ERROR nova.compute.manager [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2179.764429] env[68040]: Faults: ['InvalidArgument'] [ 2179.764429] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Traceback (most recent call last): [ 2179.764429] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2179.764429] env[68040]: ERROR 
nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] self.driver.spawn(context, instance, image_meta, [ 2179.764429] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2179.764429] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2179.764429] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2179.764429] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] self._fetch_image_if_missing(context, vi) [ 2179.764429] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2179.764429] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] image_cache(vi, tmp_image_ds_loc) [ 2179.764429] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2179.764801] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] vm_util.copy_virtual_disk( [ 2179.764801] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2179.764801] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] session._wait_for_task(vmdk_copy_task) [ 2179.764801] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2179.764801] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] return self.wait_for_task(task_ref) [ 2179.764801] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2179.764801] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] return evt.wait() [ 2179.764801] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2179.764801] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] result = hub.switch() [ 2179.764801] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2179.764801] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] return self.greenlet.switch() [ 2179.764801] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2179.764801] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] self.f(*self.args, **self.kw) [ 2179.765175] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2179.765175] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] raise exceptions.translate_fault(task_info.error) [ 2179.765175] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2179.765175] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Faults: ['InvalidArgument'] [ 2179.765175] env[68040]: ERROR nova.compute.manager [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] [ 2179.765175] env[68040]: DEBUG nova.compute.utils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2179.766496] env[68040]: DEBUG nova.compute.manager [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Build of instance 0210d9d4-2161-4b06-bc81-9de361accca6 was re-scheduled: A specified parameter was not correct: fileType [ 2179.766496] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2179.766890] env[68040]: DEBUG nova.compute.manager [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2179.767083] env[68040]: DEBUG nova.compute.manager [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2179.767261] env[68040]: DEBUG nova.compute.manager [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2179.767425] env[68040]: DEBUG nova.network.neutron [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2180.068427] env[68040]: DEBUG nova.network.neutron [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2180.080025] env[68040]: INFO nova.compute.manager [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Took 0.31 seconds to deallocate network for instance. [ 2180.184971] env[68040]: INFO nova.scheduler.client.report [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Deleted allocations for instance 0210d9d4-2161-4b06-bc81-9de361accca6 [ 2180.206348] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a4e4cc78-4c7a-41e7-acc3-5cdaabaec258 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "0210d9d4-2161-4b06-bc81-9de361accca6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 584.441s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2180.206623] env[68040]: DEBUG oslo_concurrency.lockutils [None req-32eb3e75-d6d3-4bc4-95a0-2b133d158a51 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "0210d9d4-2161-4b06-bc81-9de361accca6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 388.365s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2180.206902] env[68040]: DEBUG oslo_concurrency.lockutils [None req-32eb3e75-d6d3-4bc4-95a0-2b133d158a51 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "0210d9d4-2161-4b06-bc81-9de361accca6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2180.207175] env[68040]: DEBUG oslo_concurrency.lockutils [None req-32eb3e75-d6d3-4bc4-95a0-2b133d158a51 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "0210d9d4-2161-4b06-bc81-9de361accca6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2180.207335] env[68040]: DEBUG oslo_concurrency.lockutils [None req-32eb3e75-d6d3-4bc4-95a0-2b133d158a51 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "0210d9d4-2161-4b06-bc81-9de361accca6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2180.209830] env[68040]: INFO nova.compute.manager [None req-32eb3e75-d6d3-4bc4-95a0-2b133d158a51 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Terminating instance [ 2180.212644] env[68040]: DEBUG nova.compute.manager [None req-32eb3e75-d6d3-4bc4-95a0-2b133d158a51 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2180.213152] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-32eb3e75-d6d3-4bc4-95a0-2b133d158a51 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2180.213427] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d7bc34d6-6c17-4695-b680-0f95c4a0fbda {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.223544] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765c10c0-faf4-40db-a7f5-d4b8677a13f1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.249228] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-32eb3e75-d6d3-4bc4-95a0-2b133d158a51 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0210d9d4-2161-4b06-bc81-9de361accca6 could not be found. [ 2180.249440] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-32eb3e75-d6d3-4bc4-95a0-2b133d158a51 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2180.249627] env[68040]: INFO nova.compute.manager [None req-32eb3e75-d6d3-4bc4-95a0-2b133d158a51 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2180.249874] env[68040]: DEBUG oslo.service.loopingcall [None req-32eb3e75-d6d3-4bc4-95a0-2b133d158a51 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
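Annotation: the "Deleted allocations for instance" line a little earlier is the scheduler report client clearing the instance's placement allocations. A hedged sketch of the underlying REST call, with placeholder endpoint and token:

    import requests

    resp = requests.delete(
        "http://placement.example.org/allocations/"
        "0210d9d4-2161-4b06-bc81-9de361accca6",
        headers={"X-Auth-Token": "ADMIN_TOKEN"})
    assert resp.status_code == 204  # placement replies 204 No Content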
{{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2180.250101] env[68040]: DEBUG nova.compute.manager [-] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2180.250199] env[68040]: DEBUG nova.network.neutron [-] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2180.273043] env[68040]: DEBUG nova.network.neutron [-] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2180.281365] env[68040]: INFO nova.compute.manager [-] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] Took 0.03 seconds to deallocate network for instance. [ 2180.366312] env[68040]: DEBUG oslo_concurrency.lockutils [None req-32eb3e75-d6d3-4bc4-95a0-2b133d158a51 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "0210d9d4-2161-4b06-bc81-9de361accca6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.160s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2180.367139] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "0210d9d4-2161-4b06-bc81-9de361accca6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 345.177s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2180.367336] env[68040]: INFO nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 0210d9d4-2161-4b06-bc81-9de361accca6] During sync_power_state the instance has a pending task (deleting). Skip. [ 2180.367509] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "0210d9d4-2161-4b06-bc81-9de361accca6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2180.983458] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2180.983652] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2180.983752] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2180.999273] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Skipping network cache update for instance because it is Building. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2180.999518] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2180.999730] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2180.999932] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2181.000157] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2182.983822] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2184.548466] env[68040]: DEBUG oslo_concurrency.lockutils [None req-2ba91fa1-e2d7-40f1-b5c3-56dc892cae5c tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "e9994aad-8053-4936-ad4b-5347a1a62f4e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2185.983775] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2185.984222] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2186.983639] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2186.983846] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... 
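Annotation: the recurring "Running periodic task ComputeManager._*" lines come from oslo.service's periodic task runner. A minimal self-contained sketch; the spacing value is illustrative, not Nova's configuration:

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _poll_rescued_instances(self, context):
            pass  # each tick emits a "Running periodic task ..." DEBUG line

    Manager().run_periodic_tasks(context=None)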
{{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2186.984117] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2186.996064] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2186.996303] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2186.996511] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2186.996674] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2186.997796] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865a0fb5-1f43-41fc-944a-a5c450098102 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.006273] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448ea3ea-caa8-4ffb-b40f-75e96b5fc7f9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.019845] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2051a91a-97a1-4e3d-a3b7-c0ba103ad9f9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.025926] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cefe6f22-ba00-4309-bd63-b6ed261ad38b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.053594] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180955MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2187.053736] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2187.053917] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2187.103930] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8f9a6934-9ded-4561-8d83-aacd4d79f29a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2187.104117] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8a1a6866-1439-4f82-9fda-a7d9a7f211a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2187.104255] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 92b0f3c0-2c87-478d-8b11-f0b05aee12ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2187.104378] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e9994aad-8053-4936-ad4b-5347a1a62f4e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
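Annotation: the four per-instance allocations above add up exactly to the final resource view reported just below. A worked check of that bookkeeping, assuming Nova's usual semantics (reserved memory counts as used; each instance claims the 1 GB / 128 MB / 1 VCPU shown):

    instances = 4                      # the four actively managed instances
    used_ram = 512 + instances * 128   # reserved + 4 x MEMORY_MB -> 1024 MB
    used_disk = instances * 1          # 4 x DISK_GB               -> 4 GB
    used_vcpus = instances * 1         # 4 x VCPU                  -> 4
    assert (used_ram, used_disk, used_vcpus) == (1024, 4, 4)

    # Placement-side capacity from the inventory data just below:
    # (total - reserved) * allocation_ratio.
    vcpu_capacity = (48 - 0) * 4.0     # -> 192 schedulable VCPUs
    ram_capacity = (196590 - 512) * 1.0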
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2187.104556] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2187.104695] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1024MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2187.159511] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c361e11-2fca-4a0c-8d6a-6a33f76812c9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.166525] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2fd4daa-fa67-4605-9267-d8beaa2fb6cf {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.194525] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e12bf15f-6c8a-4a64-a7de-6b6803ceb855 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.201294] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9acd24-4f2d-4cda-8b07-b7b8a4337f09 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.213699] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2187.222265] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2187.236290] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2187.236472] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.183s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2191.231835] env[68040]: DEBUG oslo_service.periodic_task [None 
req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2195.979621] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2229.464159] env[68040]: WARNING oslo_vmware.rw_handles [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2229.464159] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2229.464159] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2229.464159] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2229.464159] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2229.464159] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 2229.464159] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2229.464159] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2229.464159] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2229.464159] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2229.464159] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2229.464159] env[68040]: ERROR oslo_vmware.rw_handles [ 2229.464887] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/d4394c8c-4d18-48c2-b540-be2131a9076f/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2229.466567] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2229.466810] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Copying Virtual Disk [datastore2] vmware_temp/d4394c8c-4d18-48c2-b540-be2131a9076f/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/d4394c8c-4d18-48c2-b540-be2131a9076f/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2229.467127] env[68040]: 
DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-30d45d05-bd90-4477-a6b6-14ff0bff0007 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.477050] env[68040]: DEBUG oslo_vmware.api [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Waiting for the task: (returnval){ [ 2229.477050] env[68040]: value = "task-3200354" [ 2229.477050] env[68040]: _type = "Task" [ 2229.477050] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2229.484802] env[68040]: DEBUG oslo_vmware.api [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Task: {'id': task-3200354, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2229.988047] env[68040]: DEBUG oslo_vmware.exceptions [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2229.988332] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2229.988938] env[68040]: ERROR nova.compute.manager [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2229.988938] env[68040]: Faults: ['InvalidArgument'] [ 2229.988938] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Traceback (most recent call last): [ 2229.988938] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2229.988938] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] yield resources [ 2229.988938] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2229.988938] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] self.driver.spawn(context, instance, image_meta, [ 2229.988938] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2229.988938] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2229.988938] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2229.988938] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] self._fetch_image_if_missing(context, vi) [ 2229.988938] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2229.989496] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] image_cache(vi, tmp_image_ds_loc) [ 2229.989496] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2229.989496] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] vm_util.copy_virtual_disk( [ 2229.989496] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2229.989496] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] session._wait_for_task(vmdk_copy_task) [ 2229.989496] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2229.989496] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] return self.wait_for_task(task_ref) [ 2229.989496] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2229.989496] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] return evt.wait() [ 2229.989496] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2229.989496] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] result = hub.switch() [ 2229.989496] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2229.989496] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] return self.greenlet.switch() [ 2229.989905] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2229.989905] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] self.f(*self.args, **self.kw) [ 2229.989905] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2229.989905] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] raise exceptions.translate_fault(task_info.error) [ 2229.989905] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2229.989905] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Faults: ['InvalidArgument'] [ 2229.989905] 
env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] [ 2229.989905] env[68040]: INFO nova.compute.manager [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Terminating instance [ 2229.990862] env[68040]: DEBUG oslo_concurrency.lockutils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2229.991087] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2229.991327] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5781f75-b2a0-45ce-bd34-ca3d99a82a27 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.993664] env[68040]: DEBUG nova.compute.manager [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2229.993813] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2229.994610] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b36ed5-d674-4713-a1b5-d9e878b28b20 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.001338] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2230.001551] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-038c7189-8a0c-48c7-a472-78cecf4990e6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.003666] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2230.003863] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 
tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2230.004804] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e9b31b7-146b-4868-bb29-1a20ac414b28 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.011481] env[68040]: DEBUG oslo_vmware.api [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Waiting for the task: (returnval){ [ 2230.011481] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52df23f0-bfa9-87b1-956a-73125424ebd9" [ 2230.011481] env[68040]: _type = "Task" [ 2230.011481] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2230.022195] env[68040]: DEBUG oslo_vmware.api [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52df23f0-bfa9-87b1-956a-73125424ebd9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2230.076408] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2230.076613] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2230.076791] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Deleting the datastore file [datastore2] 8f9a6934-9ded-4561-8d83-aacd4d79f29a {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2230.077069] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fbf7ff8d-587b-41f1-9760-68cc8201e0c3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.083483] env[68040]: DEBUG oslo_vmware.api [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Waiting for the task: (returnval){ [ 2230.083483] env[68040]: value = "task-3200356" [ 2230.083483] env[68040]: _type = "Task" [ 2230.083483] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2230.090773] env[68040]: DEBUG oslo_vmware.api [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Task: {'id': task-3200356, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2230.521642] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2230.521913] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Creating directory with path [datastore2] vmware_temp/c570c3b5-9645-4389-9284-e0cbe8858821/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2230.522159] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81103666-3094-4b91-87d8-16284f63b143 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.532983] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Created directory with path [datastore2] vmware_temp/c570c3b5-9645-4389-9284-e0cbe8858821/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2230.533192] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Fetch image to [datastore2] vmware_temp/c570c3b5-9645-4389-9284-e0cbe8858821/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2230.533365] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/c570c3b5-9645-4389-9284-e0cbe8858821/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2230.534066] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf2690f5-1622-49bc-8fe6-e7ec33e16a85 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.540449] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ffc97d-8936-4aa3-8b2f-440431970c15 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.549064] 
env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9404bc47-5630-465f-9d45-653862c5503d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.579577] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a8693a7-1381-4a9b-b470-e6666e318805 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.587225] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fca4588e-0d06-46a0-adfe-d31b69ca938b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.593032] env[68040]: DEBUG oslo_vmware.api [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Task: {'id': task-3200356, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071828} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2230.593263] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2230.593437] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2230.593615] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2230.593779] env[68040]: INFO nova.compute.manager [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Took 0.60 seconds to destroy the instance on the hypervisor. 
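[editor's sketch] The records above show nova's image-cache path invoking VirtualDiskManager.CopyVirtualDisk_Task and then polling it, which is where the "Waiting for the task ... to complete" and "progress is 0%" lines come from; the InvalidArgument fault surfaces only once polling sees the task error. A minimal sketch of that invoke-then-wait pattern, assuming a reachable vCenter; the credentials and datastore paths below are hypothetical, and only the method names come from the traceback:

    # Sketch only: mirrors vm_util.copy_virtual_disk -> session.wait_for_task
    # as seen in the traceback; endpoint, credentials and paths are made up.
    from oslo_vmware import api
    from oslo_vmware import exceptions as vexc

    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',          # hypothetical endpoint
        api_retry_count=10, task_poll_interval=0.5)

    vdm = session.vim.service_content.virtualDiskManager
    # A real call also passes sourceDatacenter/destDatacenter refs.
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', vdm,
        sourceName='[datastore2] vmware_temp/example/tmp-sparse.vmdk',
        destName='[datastore2] vmware_temp/example/example.vmdk')
    try:
        # wait_for_task() polls the task server-side (the "progress is 0%"
        # records) and raises the translated fault when the task errors out.
        session.wait_for_task(task)
    except vexc.VimFaultException as e:
        print(e.fault_list)   # e.g. ['InvalidArgument']
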
[ 2230.595913] env[68040]: DEBUG nova.compute.claims [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2230.596110] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2230.596337] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2230.606600] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2230.658632] env[68040]: DEBUG oslo_vmware.rw_handles [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c570c3b5-9645-4389-9284-e0cbe8858821/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2230.718370] env[68040]: DEBUG oslo_vmware.rw_handles [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2230.718564] env[68040]: DEBUG oslo_vmware.rw_handles [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c570c3b5-9645-4389-9284-e0cbe8858821/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2230.747695] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715fb734-aae2-4d8c-a4c8-c8da58a5610f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.754670] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11d6b25f-0e8e-4c7b-a28f-d74a2a553d2a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.782766] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a260ca67-5143-48a7-a615-b33a9f0ff1ca {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.789177] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce069a4b-cbf2-431a-97b0-4e928eda6abc {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.801413] env[68040]: DEBUG nova.compute.provider_tree [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2230.810293] env[68040]: DEBUG nova.scheduler.client.report [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2230.823106] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.227s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2230.823603] env[68040]: ERROR nova.compute.manager [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2230.823603] env[68040]: Faults: ['InvalidArgument'] [ 2230.823603] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Traceback (most recent call last): [ 2230.823603] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2230.823603] 
env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] self.driver.spawn(context, instance, image_meta, [ 2230.823603] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2230.823603] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2230.823603] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2230.823603] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] self._fetch_image_if_missing(context, vi) [ 2230.823603] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2230.823603] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] image_cache(vi, tmp_image_ds_loc) [ 2230.823603] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2230.823995] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] vm_util.copy_virtual_disk( [ 2230.823995] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2230.823995] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] session._wait_for_task(vmdk_copy_task) [ 2230.823995] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2230.823995] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] return self.wait_for_task(task_ref) [ 2230.823995] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2230.823995] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] return evt.wait() [ 2230.823995] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2230.823995] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] result = hub.switch() [ 2230.823995] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2230.823995] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] return self.greenlet.switch() [ 2230.823995] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2230.823995] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] self.f(*self.args, **self.kw) [ 2230.824373] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2230.824373] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] raise exceptions.translate_fault(task_info.error) [ 2230.824373] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2230.824373] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Faults: ['InvalidArgument'] [ 2230.824373] env[68040]: ERROR nova.compute.manager [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] [ 2230.824373] env[68040]: DEBUG nova.compute.utils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2230.825582] env[68040]: DEBUG nova.compute.manager [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Build of instance 8f9a6934-9ded-4561-8d83-aacd4d79f29a was re-scheduled: A specified parameter was not correct: fileType [ 2230.825582] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2230.825973] env[68040]: DEBUG nova.compute.manager [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2230.826171] env[68040]: DEBUG nova.compute.manager [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2230.826349] env[68040]: DEBUG nova.compute.manager [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2230.826518] env[68040]: DEBUG nova.network.neutron [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2231.159761] env[68040]: DEBUG nova.network.neutron [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2231.172365] env[68040]: INFO nova.compute.manager [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Took 0.34 seconds to deallocate network for instance. [ 2231.274791] env[68040]: INFO nova.scheduler.client.report [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Deleted allocations for instance 8f9a6934-9ded-4561-8d83-aacd4d79f29a [ 2231.295247] env[68040]: DEBUG oslo_concurrency.lockutils [None req-24ff330a-c261-461b-a4e2-04d3debc6e1e tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Lock "8f9a6934-9ded-4561-8d83-aacd4d79f29a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 511.102s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2231.295513] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "8f9a6934-9ded-4561-8d83-aacd4d79f29a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 396.106s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2231.295740] env[68040]: INFO nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] During sync_power_state the instance has a pending task (spawning). Skip. 
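[editor's sketch] The "Acquiring lock ... / Lock ... acquired ... waited / Lock ... 'released' ... held" triplets that bracket these records are emitted by oslo.concurrency's synchronized decorator (the inner() frames at lockutils.py:402/407/421). A minimal sketch of how such records are produced, with hypothetical names and assuming DEBUG logging is wired up:

    # Sketch only: the decorator serializes callers on an in-process lock and
    # logs the waited/held timings seen throughout this log.
    import logging
    from oslo_concurrency import lockutils

    logging.basicConfig(level=logging.DEBUG)

    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        # Runs with "compute_resources" held, like the ResourceTracker
        # methods in the records above.
        pass

    update_available_resource()
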
[ 2231.295927] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "8f9a6934-9ded-4561-8d83-aacd4d79f29a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2231.296445] env[68040]: DEBUG oslo_concurrency.lockutils [None req-103b1d88-38df-4185-8b30-0df4f574425b tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Lock "8f9a6934-9ded-4561-8d83-aacd4d79f29a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 315.118s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2231.296663] env[68040]: DEBUG oslo_concurrency.lockutils [None req-103b1d88-38df-4185-8b30-0df4f574425b tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Acquiring lock "8f9a6934-9ded-4561-8d83-aacd4d79f29a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2231.296901] env[68040]: DEBUG oslo_concurrency.lockutils [None req-103b1d88-38df-4185-8b30-0df4f574425b tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Lock "8f9a6934-9ded-4561-8d83-aacd4d79f29a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2231.297162] env[68040]: DEBUG oslo_concurrency.lockutils [None req-103b1d88-38df-4185-8b30-0df4f574425b tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Lock "8f9a6934-9ded-4561-8d83-aacd4d79f29a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2231.299334] env[68040]: INFO nova.compute.manager [None req-103b1d88-38df-4185-8b30-0df4f574425b tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Terminating instance [ 2231.301886] env[68040]: DEBUG nova.compute.manager [None req-103b1d88-38df-4185-8b30-0df4f574425b tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Start destroying the instance on the hypervisor. 
{{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2231.301886] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-103b1d88-38df-4185-8b30-0df4f574425b tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2231.302156] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-406e976a-9095-4d54-b750-ae2aca083630 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.312061] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d9cb7bd-9d7b-4f59-a9c0-138d5fc35146 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.336488] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-103b1d88-38df-4185-8b30-0df4f574425b tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8f9a6934-9ded-4561-8d83-aacd4d79f29a could not be found. [ 2231.336695] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-103b1d88-38df-4185-8b30-0df4f574425b tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2231.336881] env[68040]: INFO nova.compute.manager [None req-103b1d88-38df-4185-8b30-0df4f574425b tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2231.337149] env[68040]: DEBUG oslo.service.loopingcall [None req-103b1d88-38df-4185-8b30-0df4f574425b tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2231.337367] env[68040]: DEBUG nova.compute.manager [-] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2231.337463] env[68040]: DEBUG nova.network.neutron [-] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2231.363815] env[68040]: DEBUG nova.network.neutron [-] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2231.371358] env[68040]: INFO nova.compute.manager [-] [instance: 8f9a6934-9ded-4561-8d83-aacd4d79f29a] Took 0.03 seconds to deallocate network for instance. 
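[editor's sketch] Each inventory dict repeated in these records fixes the schedulable capacity placement works with: per resource class, capacity is (total - reserved) * allocation_ratio, which is why the handful of allocated VCPUs reported in the periodic audits is nowhere near exhaustion at a 4.0 ratio. Illustrative arithmetic using the values logged for this provider:

    # Sketch only: standard placement capacity arithmetic applied to the
    # inventory logged for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44
    # (min_unit/max_unit/step_size omitted; they bound per-request sizing).
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
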
[ 2231.453931] env[68040]: DEBUG oslo_concurrency.lockutils [None req-103b1d88-38df-4185-8b30-0df4f574425b tempest-AttachVolumeNegativeTest-592304569 tempest-AttachVolumeNegativeTest-592304569-project-member] Lock "8f9a6934-9ded-4561-8d83-aacd4d79f29a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.157s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2231.983770] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2237.985837] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2241.984443] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2241.984775] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2241.984775] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2241.997581] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2241.997885] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2241.998092] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2241.998235] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2244.983805] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2245.984638] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2246.985081] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2247.984530] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2247.984738] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2247.984903] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2247.997118] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2247.997450] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2247.997504] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2247.997651] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2247.998849] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfce0f5a-adc2-43c8-86ce-46b3927e8591 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.007544] env[68040]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69cfd763-19dc-4bf5-84f6-76e3def941bf {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.021934] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af08e9f0-0ecf-451c-8756-f3b752e5066f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.027903] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39614bb-550a-491c-bce6-889941bfc690 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.055411] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180985MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2248.055549] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2248.055736] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2248.104331] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 8a1a6866-1439-4f82-9fda-a7d9a7f211a3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2248.104534] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 92b0f3c0-2c87-478d-8b11-f0b05aee12ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2248.104680] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e9994aad-8053-4936-ad4b-5347a1a62f4e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2248.104859] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2248.105007] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2248.119845] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Refreshing inventories for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2248.132838] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Updating ProviderTree inventory for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2248.133052] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Updating inventory in ProviderTree for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2248.143040] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Refreshing aggregate associations for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44, aggregates: None {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2248.161205] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Refreshing trait associations for resource provider 22db6f73-b3da-436a-bf40-9c8c240b2e44, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68040) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2248.204224] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-731470c1-333f-487c-948a-00d7e246a8be {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.211554] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-be4ef3f1-f8ec-417b-b5fe-446aa79269f2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.241294] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5690d2b8-3f61-431a-937f-632172143ac4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.247659] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6282eb-d6ac-421f-b0b0-e78c125a2c3e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.260649] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2248.269233] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2248.282621] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2248.282793] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.227s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2251.277725] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2257.561427] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquiring lock "d0134198-5d43-47de-a1fc-490cca429e55" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2257.561765] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Lock "d0134198-5d43-47de-a1fc-490cca429e55" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2257.572342] env[68040]: DEBUG nova.compute.manager [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2257.620498] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2257.620738] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2257.622181] env[68040]: INFO nova.compute.claims [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2257.719495] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7349a4e0-07ed-47b6-857c-941aedc7fb30 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.727144] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba13dedc-0e10-42c1-a585-5fd296a16d6d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.757099] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd471fa-11cf-43a9-8f31-2f03359cbccb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.763964] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a87005d6-5ad5-4b78-95e7-eaa99540092b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.776871] env[68040]: DEBUG nova.compute.provider_tree [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2257.785922] env[68040]: DEBUG nova.scheduler.client.report [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2257.799086] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.178s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2257.799622] env[68040]: DEBUG nova.compute.manager [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2257.830183] env[68040]: DEBUG nova.compute.utils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2257.831325] env[68040]: DEBUG nova.compute.manager [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2257.831501] env[68040]: DEBUG nova.network.neutron [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2257.842031] env[68040]: DEBUG nova.compute.manager [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Start building block device mappings for instance. 
{{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2257.917011] env[68040]: DEBUG nova.policy [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d827affb8fa4ee6abe00918076b629e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba36e75b6181468a80999043bb27346c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 2257.923369] env[68040]: DEBUG nova.compute.manager [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Start spawning the instance on the hypervisor. {{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2257.951622] env[68040]: DEBUG nova.virt.hardware [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-27T05:59:34Z,virtual_size=,visibility=), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2257.951885] env[68040]: DEBUG nova.virt.hardware [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2257.952260] env[68040]: DEBUG nova.virt.hardware [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2257.952330] env[68040]: DEBUG nova.virt.hardware [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2257.952438] env[68040]: DEBUG nova.virt.hardware [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 2257.952591] env[68040]: DEBUG nova.virt.hardware [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2257.952801] env[68040]: DEBUG nova.virt.hardware [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2257.952971] env[68040]: DEBUG nova.virt.hardware [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2257.953146] env[68040]: DEBUG nova.virt.hardware [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2257.953313] env[68040]: DEBUG nova.virt.hardware [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2257.953491] env[68040]: DEBUG nova.virt.hardware [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2257.954382] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0276d258-2146-4494-afba-da4ff3b6c81c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.965029] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced4de5d-4c1c-466d-9888-89e30e4b7e20 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.245498] env[68040]: DEBUG nova.network.neutron [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Successfully created port: e4a8881e-28f8-45b2-97de-b8a6127ac2fb {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2258.802270] env[68040]: DEBUG nova.compute.manager [req-6f9a8d25-a8dd-41c9-8a51-2ad5d3da0114 req-14cb2733-a1f0-40e2-bd86-02d5a7080904 service nova] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Received event network-vif-plugged-e4a8881e-28f8-45b2-97de-b8a6127ac2fb {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 
2258.802527] env[68040]: DEBUG oslo_concurrency.lockutils [req-6f9a8d25-a8dd-41c9-8a51-2ad5d3da0114 req-14cb2733-a1f0-40e2-bd86-02d5a7080904 service nova] Acquiring lock "d0134198-5d43-47de-a1fc-490cca429e55-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2258.802725] env[68040]: DEBUG oslo_concurrency.lockutils [req-6f9a8d25-a8dd-41c9-8a51-2ad5d3da0114 req-14cb2733-a1f0-40e2-bd86-02d5a7080904 service nova] Lock "d0134198-5d43-47de-a1fc-490cca429e55-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2258.802975] env[68040]: DEBUG oslo_concurrency.lockutils [req-6f9a8d25-a8dd-41c9-8a51-2ad5d3da0114 req-14cb2733-a1f0-40e2-bd86-02d5a7080904 service nova] Lock "d0134198-5d43-47de-a1fc-490cca429e55-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2258.803170] env[68040]: DEBUG nova.compute.manager [req-6f9a8d25-a8dd-41c9-8a51-2ad5d3da0114 req-14cb2733-a1f0-40e2-bd86-02d5a7080904 service nova] [instance: d0134198-5d43-47de-a1fc-490cca429e55] No waiting events found dispatching network-vif-plugged-e4a8881e-28f8-45b2-97de-b8a6127ac2fb {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2258.803411] env[68040]: WARNING nova.compute.manager [req-6f9a8d25-a8dd-41c9-8a51-2ad5d3da0114 req-14cb2733-a1f0-40e2-bd86-02d5a7080904 service nova] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Received unexpected event network-vif-plugged-e4a8881e-28f8-45b2-97de-b8a6127ac2fb for instance with vm_state building and task_state spawning. 
[ 2258.861565] env[68040]: DEBUG nova.network.neutron [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Successfully updated port: e4a8881e-28f8-45b2-97de-b8a6127ac2fb {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2258.873941] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquiring lock "refresh_cache-d0134198-5d43-47de-a1fc-490cca429e55" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2258.873941] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquired lock "refresh_cache-d0134198-5d43-47de-a1fc-490cca429e55" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2258.874101] env[68040]: DEBUG nova.network.neutron [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2258.919518] env[68040]: DEBUG nova.network.neutron [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Instance cache missing network info. 
{{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2259.081169] env[68040]: DEBUG nova.network.neutron [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Updating instance_info_cache with network_info: [{"id": "e4a8881e-28f8-45b2-97de-b8a6127ac2fb", "address": "fa:16:3e:ce:3d:53", "network": {"id": "8ab8aca2-f199-4056-a7a7-9df3bd781a4a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-576267632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba36e75b6181468a80999043bb27346c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a4d142-3f97-47fe-b074-58923c46815e", "external-id": "nsx-vlan-transportzone-565", "segmentation_id": 565, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4a8881e-28", "ovs_interfaceid": "e4a8881e-28f8-45b2-97de-b8a6127ac2fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2259.093851] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Releasing lock "refresh_cache-d0134198-5d43-47de-a1fc-490cca429e55" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2259.094156] env[68040]: DEBUG nova.compute.manager [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Instance network_info: |[{"id": "e4a8881e-28f8-45b2-97de-b8a6127ac2fb", "address": "fa:16:3e:ce:3d:53", "network": {"id": "8ab8aca2-f199-4056-a7a7-9df3bd781a4a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-576267632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba36e75b6181468a80999043bb27346c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a4d142-3f97-47fe-b074-58923c46815e", "external-id": "nsx-vlan-transportzone-565", "segmentation_id": 565, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4a8881e-28", "ovs_interfaceid": "e4a8881e-28f8-45b2-97de-b8a6127ac2fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2259.094592] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:3d:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '49a4d142-3f97-47fe-b074-58923c46815e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e4a8881e-28f8-45b2-97de-b8a6127ac2fb', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2259.102406] env[68040]: DEBUG oslo.service.loopingcall [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2259.102854] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2259.103118] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7281c50-03b5-44c7-bd0d-51f8124b0f81 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.125023] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2259.125023] env[68040]: value = "task-3200357" [ 2259.125023] env[68040]: _type = "Task" [ 2259.125023] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2259.132665] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200357, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2259.635826] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200357, 'name': CreateVM_Task, 'duration_secs': 0.29318} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2259.636008] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2259.636687] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2259.636856] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2259.637204] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2259.637452] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40d56a56-5dab-4287-b84d-09504c9d4470 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.642245] env[68040]: DEBUG oslo_vmware.api [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Waiting for the task: (returnval){ [ 2259.642245] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5238d19d-e617-ba29-4c7e-6d65cd544aee" [ 2259.642245] env[68040]: _type = "Task" [ 2259.642245] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2259.649832] env[68040]: DEBUG oslo_vmware.api [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]5238d19d-e617-ba29-4c7e-6d65cd544aee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2260.152598] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2260.152925] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2260.153094] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2260.827125] env[68040]: DEBUG nova.compute.manager [req-2416c6e3-cf73-484e-a1c0-b78834b44691 req-21505a03-378e-4889-a146-4f71357979d3 service nova] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Received event network-changed-e4a8881e-28f8-45b2-97de-b8a6127ac2fb {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2260.827338] env[68040]: DEBUG nova.compute.manager [req-2416c6e3-cf73-484e-a1c0-b78834b44691 req-21505a03-378e-4889-a146-4f71357979d3 service nova] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Refreshing instance network info cache due to event network-changed-e4a8881e-28f8-45b2-97de-b8a6127ac2fb. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2260.827558] env[68040]: DEBUG oslo_concurrency.lockutils [req-2416c6e3-cf73-484e-a1c0-b78834b44691 req-21505a03-378e-4889-a146-4f71357979d3 service nova] Acquiring lock "refresh_cache-d0134198-5d43-47de-a1fc-490cca429e55" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2260.827706] env[68040]: DEBUG oslo_concurrency.lockutils [req-2416c6e3-cf73-484e-a1c0-b78834b44691 req-21505a03-378e-4889-a146-4f71357979d3 service nova] Acquired lock "refresh_cache-d0134198-5d43-47de-a1fc-490cca429e55" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2260.827867] env[68040]: DEBUG nova.network.neutron [req-2416c6e3-cf73-484e-a1c0-b78834b44691 req-21505a03-378e-4889-a146-4f71357979d3 service nova] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Refreshing network info cache for port e4a8881e-28f8-45b2-97de-b8a6127ac2fb {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2261.074480] env[68040]: DEBUG nova.network.neutron [req-2416c6e3-cf73-484e-a1c0-b78834b44691 req-21505a03-378e-4889-a146-4f71357979d3 service nova] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Updated VIF entry in instance network info cache for port e4a8881e-28f8-45b2-97de-b8a6127ac2fb. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2261.074834] env[68040]: DEBUG nova.network.neutron [req-2416c6e3-cf73-484e-a1c0-b78834b44691 req-21505a03-378e-4889-a146-4f71357979d3 service nova] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Updating instance_info_cache with network_info: [{"id": "e4a8881e-28f8-45b2-97de-b8a6127ac2fb", "address": "fa:16:3e:ce:3d:53", "network": {"id": "8ab8aca2-f199-4056-a7a7-9df3bd781a4a", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-576267632-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba36e75b6181468a80999043bb27346c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "49a4d142-3f97-47fe-b074-58923c46815e", "external-id": "nsx-vlan-transportzone-565", "segmentation_id": 565, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4a8881e-28", "ovs_interfaceid": "e4a8881e-28f8-45b2-97de-b8a6127ac2fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2261.084033] env[68040]: DEBUG oslo_concurrency.lockutils [req-2416c6e3-cf73-484e-a1c0-b78834b44691 req-21505a03-378e-4889-a146-4f71357979d3 service nova] Releasing lock "refresh_cache-d0134198-5d43-47de-a1fc-490cca429e55" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2278.866068] env[68040]: WARNING oslo_vmware.rw_handles [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2278.866068] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2278.866068] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2278.866068] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2278.866068] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2278.866068] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 2278.866068] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2278.866068] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2278.866068] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2278.866068] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2278.866068] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2278.866068] env[68040]: ERROR oslo_vmware.rw_handles [ 2278.866858] env[68040]: DEBUG nova.virt.vmwareapi.images [None 
req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/c570c3b5-9645-4389-9284-e0cbe8858821/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2278.868350] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2278.868629] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Copying Virtual Disk [datastore2] vmware_temp/c570c3b5-9645-4389-9284-e0cbe8858821/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/c570c3b5-9645-4389-9284-e0cbe8858821/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2278.868923] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-680c3ec5-a77f-4719-9002-273ed62ba8a7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2278.877784] env[68040]: DEBUG oslo_vmware.api [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Waiting for the task: (returnval){ [ 2278.877784] env[68040]: value = "task-3200358" [ 2278.877784] env[68040]: _type = "Task" [ 2278.877784] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2278.885297] env[68040]: DEBUG oslo_vmware.api [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Task: {'id': task-3200358, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2279.387840] env[68040]: DEBUG oslo_vmware.exceptions [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Fault InvalidArgument not matched. 
{{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2279.388131] env[68040]: DEBUG oslo_concurrency.lockutils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2279.388724] env[68040]: ERROR nova.compute.manager [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2279.388724] env[68040]: Faults: ['InvalidArgument'] [ 2279.388724] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Traceback (most recent call last): [ 2279.388724] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2279.388724] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] yield resources [ 2279.388724] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2279.388724] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] self.driver.spawn(context, instance, image_meta, [ 2279.388724] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2279.388724] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2279.388724] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2279.388724] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] self._fetch_image_if_missing(context, vi) [ 2279.388724] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2279.389357] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] image_cache(vi, tmp_image_ds_loc) [ 2279.389357] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2279.389357] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] vm_util.copy_virtual_disk( [ 2279.389357] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2279.389357] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] session._wait_for_task(vmdk_copy_task) [ 2279.389357] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2279.389357] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] return self.wait_for_task(task_ref) [ 2279.389357] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2279.389357] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] return evt.wait() [ 2279.389357] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2279.389357] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] result = hub.switch() [ 2279.389357] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2279.389357] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] return self.greenlet.switch() [ 2279.389988] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2279.389988] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] self.f(*self.args, **self.kw) [ 2279.389988] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2279.389988] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] raise exceptions.translate_fault(task_info.error) [ 2279.389988] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2279.389988] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Faults: ['InvalidArgument'] [ 2279.389988] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] [ 2279.389988] env[68040]: INFO nova.compute.manager [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Terminating instance [ 2279.390631] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2279.390849] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2279.392175] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13b39cef-65a7-410d-bc02-6e00baa3df7e {{(pid=68040) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.394423] env[68040]: DEBUG nova.compute.manager [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2279.394624] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2279.395382] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9b6356a-16ac-4ea1-a7b4-fe16fa345088 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.402140] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2279.402355] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-639e21a3-6f56-4b96-a7d6-310318c3f1bb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.404516] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2279.404709] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2279.406037] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08555cdc-5fb7-4463-9e6b-92b89d8bfb91 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.410672] env[68040]: DEBUG oslo_vmware.api [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for the task: (returnval){ [ 2279.410672] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]523a5f63-eab4-ad18-2dad-0354626d51a2" [ 2279.410672] env[68040]: _type = "Task" [ 2279.410672] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2279.418757] env[68040]: DEBUG oslo_vmware.api [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]523a5f63-eab4-ad18-2dad-0354626d51a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2279.475486] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2279.475706] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2279.475889] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Deleting the datastore file [datastore2] 8a1a6866-1439-4f82-9fda-a7d9a7f211a3 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2279.476178] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b2b31522-3740-492c-a867-356f125f7cb4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.482353] env[68040]: DEBUG oslo_vmware.api [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Waiting for the task: (returnval){ [ 2279.482353] env[68040]: value = "task-3200360" [ 2279.482353] env[68040]: _type = "Task" [ 2279.482353] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2279.489489] env[68040]: DEBUG oslo_vmware.api [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Task: {'id': task-3200360, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2279.920960] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2279.921336] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Creating directory with path [datastore2] vmware_temp/c17f2fcd-bb86-43ec-8001-92cc49a157ff/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2279.921463] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b891abce-fa3f-4657-9249-198cce5edeb3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.932729] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Created directory with path [datastore2] vmware_temp/c17f2fcd-bb86-43ec-8001-92cc49a157ff/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2279.932933] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Fetch image to [datastore2] vmware_temp/c17f2fcd-bb86-43ec-8001-92cc49a157ff/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2279.933087] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/c17f2fcd-bb86-43ec-8001-92cc49a157ff/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2279.933781] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978121dd-e040-4fc8-810c-c3a9ff9b3052 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.940245] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7badda19-1856-4312-b795-cd4e2a44d436 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.949218] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1df159-6377-4a95-9b44-b4286348e64c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.979019] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd5d584-f715-426c-aa82-ef75752c935c {{(pid=68040) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.986903] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8fd659f2-20e6-4d9c-9164-c1dac224f6f5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.992754] env[68040]: DEBUG oslo_vmware.api [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Task: {'id': task-3200360, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067244} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2279.992977] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2279.993218] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2279.993410] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2279.993633] env[68040]: INFO nova.compute.manager [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2279.995781] env[68040]: DEBUG nova.compute.claims [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}}
[ 2279.995955] env[68040]: DEBUG oslo_concurrency.lockutils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2279.996252] env[68040]: DEBUG oslo_concurrency.lockutils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2280.006573] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}}
[ 2280.058632] env[68040]: DEBUG oslo_vmware.rw_handles [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c17f2fcd-bb86-43ec-8001-92cc49a157ff/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}}
[ 2280.118719] env[68040]: DEBUG oslo_vmware.rw_handles [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}}
[ 2280.118820] env[68040]: DEBUG oslo_vmware.rw_handles [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c17f2fcd-bb86-43ec-8001-92cc49a157ff/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 2280.159243] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a1ac78-3015-4a62-8a19-e49aaa82592e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2280.166546] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b41edc8-b6c6-402c-969f-fb84074748ea {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2280.196697] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-662a938d-ca7b-4d58-8596-6d7b498facd2 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2280.203587] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12586086-0312-4c91-8ef3-9f521df6e5e5 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2280.216474] env[68040]: DEBUG nova.compute.provider_tree [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2280.226746] env[68040]: DEBUG nova.scheduler.client.report [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2280.239239] env[68040]: DEBUG oslo_concurrency.lockutils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.243s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
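Editor's note: the rw_handles records above show the image bytes being streamed to the datastore "folder" endpoint over HTTPS. An illustrative wire-level sketch of the same operation using requests; oslo.vmware actually wraps this in its FileWriteHandle class, and the cookie name and auth plumbing below are assumptions, while the URL, query parameters, and size come straight from the log:

    import requests

    URL = ("https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/"
           "vmware_temp/c17f2fcd-bb86-43ec-8001-92cc49a157ff/"
           "8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk")
    PARAMS = {"dcPath": "ha-datacenter", "dsName": "datastore2"}

    def upload_image(image_iter, size, soap_cookie):
        # Streaming PUT; Content-Length must equal the image size (21318656).
        resp = requests.put(
            URL, params=PARAMS, data=image_iter,
            headers={"Content-Length": str(size)},
            cookies={"vmware_soap_session": soap_cookie},  # assumed cookie name
            verify=False)  # lab setup; verify TLS in production
        resp.raise_for_status()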
[ 2280.239756] env[68040]: ERROR nova.compute.manager [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2280.239756] env[68040]: Faults: ['InvalidArgument']
[ 2280.239756] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Traceback (most recent call last):
[ 2280.239756] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]   File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2280.239756] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]     self.driver.spawn(context, instance, image_meta,
[ 2280.239756] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]   File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2280.239756] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]     self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2280.239756] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2280.239756] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]     self._fetch_image_if_missing(context, vi)
[ 2280.239756] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2280.239756] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]     image_cache(vi, tmp_image_ds_loc)
[ 2280.239756] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]   File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2280.240201] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]     vm_util.copy_virtual_disk(
[ 2280.240201] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]   File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2280.240201] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]     session._wait_for_task(vmdk_copy_task)
[ 2280.240201] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]   File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2280.240201] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]     return self.wait_for_task(task_ref)
[ 2280.240201] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2280.240201] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]     return evt.wait()
[ 2280.240201] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2280.240201] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]     result = hub.switch()
[ 2280.240201] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]   File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2280.240201] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]     return self.greenlet.switch()
[ 2280.240201] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2280.240201] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]     self.f(*self.args, **self.kw)
[ 2280.240629] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]   File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2280.240629] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]     raise exceptions.translate_fault(task_info.error)
[ 2280.240629] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2280.240629] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Faults: ['InvalidArgument']
[ 2280.240629] env[68040]: ERROR nova.compute.manager [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3]
[ 2280.240629] env[68040]: DEBUG nova.compute.utils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2280.241815] env[68040]: DEBUG nova.compute.manager [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Build of instance 8a1a6866-1439-4f82-9fda-a7d9a7f211a3 was re-scheduled: A specified parameter was not correct: fileType
[ 2280.241815] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 2280.242210] env[68040]: DEBUG nova.compute.manager [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 2280.242386] env[68040]: DEBUG nova.compute.manager [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
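Editor's note: the traceback shows oslo.vmware translating the vCenter task error into VimFaultException inside _poll_task. A minimal sketch of how a caller can distinguish the fault classes carried by that exception; the wrapper name is illustrative and `session` is assumed to be a VMwareAPISession:

    from oslo_vmware import exceptions as vexc

    def wait_for_copy(session, vmdk_copy_task):
        try:
            session.wait_for_task(vmdk_copy_task)
        except vexc.VimFaultException as e:
            # e.fault_list holds the vSphere fault class names, here
            # ['InvalidArgument']; str(e) carries the localized message
            # ("A specified parameter was not correct: fileType").
            if 'InvalidArgument' in e.fault_list:
                # Not retryable: the CopyVirtualDisk_Task spec itself is
                # wrong, so Nova aborts the claim and re-schedules the build.
                raise
            raise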
[ 2280.242564] env[68040]: DEBUG nova.compute.manager [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2280.242727] env[68040]: DEBUG nova.network.neutron [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 2280.537730] env[68040]: DEBUG nova.network.neutron [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2280.550268] env[68040]: INFO nova.compute.manager [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Took 0.31 seconds to deallocate network for instance.
[ 2280.641354] env[68040]: INFO nova.scheduler.client.report [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Deleted allocations for instance 8a1a6866-1439-4f82-9fda-a7d9a7f211a3
[ 2280.662747] env[68040]: DEBUG oslo_concurrency.lockutils [None req-984fa958-af9f-4ef5-bc92-bf1debbbcc21 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Lock "8a1a6866-1439-4f82-9fda-a7d9a7f211a3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 416.539s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2280.663011] env[68040]: DEBUG oslo_concurrency.lockutils [None req-9a7c29b1-f776-4e01-b30c-c3de9a2ce7ff tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Lock "8a1a6866-1439-4f82-9fda-a7d9a7f211a3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 220.980s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2280.663357] env[68040]: DEBUG oslo_concurrency.lockutils [None req-9a7c29b1-f776-4e01-b30c-c3de9a2ce7ff tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquiring lock "8a1a6866-1439-4f82-9fda-a7d9a7f211a3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2280.663585] env[68040]: DEBUG oslo_concurrency.lockutils [None req-9a7c29b1-f776-4e01-b30c-c3de9a2ce7ff tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Lock "8a1a6866-1439-4f82-9fda-a7d9a7f211a3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2280.663760] env[68040]: DEBUG oslo_concurrency.lockutils [None req-9a7c29b1-f776-4e01-b30c-c3de9a2ce7ff tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Lock "8a1a6866-1439-4f82-9fda-a7d9a7f211a3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2280.665736] env[68040]: INFO nova.compute.manager [None req-9a7c29b1-f776-4e01-b30c-c3de9a2ce7ff tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Terminating instance
[ 2280.667563] env[68040]: DEBUG nova.compute.manager [None req-9a7c29b1-f776-4e01-b30c-c3de9a2ce7ff tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}}
[ 2280.667764] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7c29b1-f776-4e01-b30c-c3de9a2ce7ff tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}}
[ 2280.668260] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-abf0dc0d-3811-466d-9eb3-f3fba671cd9a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2280.678453] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d212b48-41db-4068-8906-9a710609dc96 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2280.705973] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-9a7c29b1-f776-4e01-b30c-c3de9a2ce7ff tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8a1a6866-1439-4f82-9fda-a7d9a7f211a3 could not be found.
[ 2280.706088] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7c29b1-f776-4e01-b30c-c3de9a2ce7ff tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}}
[ 2280.706265] env[68040]: INFO nova.compute.manager [None req-9a7c29b1-f776-4e01-b30c-c3de9a2ce7ff tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Took 0.04 seconds to destroy the instance on the hypervisor.
[ 2280.706496] env[68040]: DEBUG oslo.service.loopingcall [None req-9a7c29b1-f776-4e01-b30c-c3de9a2ce7ff tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2280.707753] env[68040]: DEBUG nova.compute.manager [-] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2280.707753] env[68040]: DEBUG nova.network.neutron [-] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 2280.732699] env[68040]: DEBUG nova.network.neutron [-] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2280.740802] env[68040]: INFO nova.compute.manager [-] [instance: 8a1a6866-1439-4f82-9fda-a7d9a7f211a3] Took 0.03 seconds to deallocate network for instance.
[ 2280.824633] env[68040]: DEBUG oslo_concurrency.lockutils [None req-9a7c29b1-f776-4e01-b30c-c3de9a2ce7ff tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Lock "8a1a6866-1439-4f82-9fda-a7d9a7f211a3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.161s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2281.353937] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "26903491-00c6-4726-b2a6-4d1f482d8785" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2281.353937] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "26903491-00c6-4726-b2a6-4d1f482d8785" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2281.363563] env[68040]: DEBUG nova.compute.manager [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}}
[ 2281.411512] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2281.411764] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2281.413269] env[68040]: INFO nova.compute.claims [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28
[ 2281.533400] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb2c5f6-c0b4-446a-b86d-d8e9521f758d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2281.541423] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d5c2de-d432-429f-94a2-7f29caa86202 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2281.572554] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c44fc4-cf9b-46ee-ac3b-b4af7ef0042f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2281.579686] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa76fe61-a14b-4dc6-b173-dcdece63fdf4 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2281.592190] env[68040]: DEBUG nova.compute.provider_tree [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2281.601699] env[68040]: DEBUG nova.scheduler.client.report [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2281.614184] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.202s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2281.614643] env[68040]: DEBUG nova.compute.manager [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}}
[ 2281.646627] env[68040]: DEBUG nova.compute.utils [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}}
[ 2281.647861] env[68040]: DEBUG nova.compute.manager [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Allocating IP information in the background. {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}}
[ 2281.648047] env[68040]: DEBUG nova.network.neutron [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}}
[ 2281.658506] env[68040]: DEBUG nova.compute.manager [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}}
[ 2281.713133] env[68040]: DEBUG nova.policy [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c8e66b0d9ada4cabbb8efd2e8340a3a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52dbf578e94a4db7af130703ad4eb741', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}}
[ 2281.719716] env[68040]: DEBUG nova.compute.manager [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Start spawning the instance on the hypervisor. {{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}}
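Editor's note: the "Waiting for function ... to return" records (here for _deallocate_network_with_retries, later for create_vm) come from oslo.service's looping-call machinery. A minimal runnable sketch of the pattern, with an illustrative function body:

    from oslo_service import loopingcall

    def _deallocate_with_retries():
        # do one attempt; raising LoopingCallDone stops the loop and
        # becomes the return value of wait()
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    result = timer.start(interval=1).wait()  # blocks until LoopingCallDone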
[ 2281.745043] env[68040]: DEBUG nova.virt.hardware [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=<?>,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-27T05:59:34Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}}
[ 2281.745043] env[68040]: DEBUG nova.virt.hardware [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}}
[ 2281.745043] env[68040]: DEBUG nova.virt.hardware [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}}
[ 2281.745277] env[68040]: DEBUG nova.virt.hardware [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}}
[ 2281.745360] env[68040]: DEBUG nova.virt.hardware [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}}
[ 2281.745514] env[68040]: DEBUG nova.virt.hardware [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}}
[ 2281.745728] env[68040]: DEBUG nova.virt.hardware [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}}
[ 2281.745894] env[68040]: DEBUG nova.virt.hardware [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}}
[ 2281.746076] env[68040]: DEBUG nova.virt.hardware [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}}
[ 2281.746246] env[68040]: DEBUG nova.virt.hardware [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}}
[ 2281.746444] env[68040]: DEBUG nova.virt.hardware [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}}
[ 2281.747343] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60f5a7a-3e1e-4d15-9107-1ef90973775d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2281.755669] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca635a0-a1ad-4e92-8f3f-96132967f70f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2282.009944] env[68040]: DEBUG nova.network.neutron [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Successfully created port: 9e9357d5-3f89-498f-a9ef-f9644bc86de8 {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}}
[ 2282.659085] env[68040]: DEBUG nova.compute.manager [req-f39823c9-33f2-49fb-823d-96f32b3990f9 req-caa3df07-5321-4122-ac09-e564d0f6cf71 service nova] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Received event network-vif-plugged-9e9357d5-3f89-498f-a9ef-f9644bc86de8 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 2282.659085] env[68040]: DEBUG oslo_concurrency.lockutils [req-f39823c9-33f2-49fb-823d-96f32b3990f9 req-caa3df07-5321-4122-ac09-e564d0f6cf71 service nova] Acquiring lock "26903491-00c6-4726-b2a6-4d1f482d8785-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2282.659085] env[68040]: DEBUG oslo_concurrency.lockutils [req-f39823c9-33f2-49fb-823d-96f32b3990f9 req-caa3df07-5321-4122-ac09-e564d0f6cf71 service nova] Lock "26903491-00c6-4726-b2a6-4d1f482d8785-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2282.659085] env[68040]: DEBUG oslo_concurrency.lockutils [req-f39823c9-33f2-49fb-823d-96f32b3990f9 req-caa3df07-5321-4122-ac09-e564d0f6cf71 service nova] Lock "26903491-00c6-4726-b2a6-4d1f482d8785-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2282.659703] env[68040]: DEBUG nova.compute.manager [req-f39823c9-33f2-49fb-823d-96f32b3990f9 req-caa3df07-5321-4122-ac09-e564d0f6cf71 service nova] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] No waiting events found dispatching network-vif-plugged-9e9357d5-3f89-498f-a9ef-f9644bc86de8 {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}}
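Editor's note: the nova.virt.hardware records walk a 1-vCPU flavor (m1.nano) through unconstrained limits and arrive at exactly one (sockets, cores, threads) factorization, 1:1:1. An illustrative, simplified stand-in for that enumeration, not Nova's actual _get_possible_cpu_topologies code:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate factorizations sockets * cores * threads == vcpus
        # within the given limits (65536 each when nothing constrains them).
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // sockets // cores
                if threads <= max_threads:
                    topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))  # [(1, 1, 1)] -> "Got 1 possible topologies"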
[ 2282.659703] env[68040]: WARNING nova.compute.manager [req-f39823c9-33f2-49fb-823d-96f32b3990f9 req-caa3df07-5321-4122-ac09-e564d0f6cf71 service nova] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Received unexpected event network-vif-plugged-9e9357d5-3f89-498f-a9ef-f9644bc86de8 for instance with vm_state building and task_state spawning.
[ 2282.742170] env[68040]: DEBUG nova.network.neutron [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Successfully updated port: 9e9357d5-3f89-498f-a9ef-f9644bc86de8 {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}}
[ 2282.754962] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "refresh_cache-26903491-00c6-4726-b2a6-4d1f482d8785" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2282.754962] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquired lock "refresh_cache-26903491-00c6-4726-b2a6-4d1f482d8785" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2282.754962] env[68040]: DEBUG nova.network.neutron [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}}
[ 2282.796058] env[68040]: DEBUG nova.network.neutron [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}}
[ 2282.957022] env[68040]: DEBUG nova.network.neutron [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Updating instance_info_cache with network_info: [{"id": "9e9357d5-3f89-498f-a9ef-f9644bc86de8", "address": "fa:16:3e:6e:ad:85", "network": {"id": "9839fe48-68c5-4649-bd83-6b4d9c6008e8", "bridge": "br-int", "label": "tempest-ServersTestJSON-1965746643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dbf578e94a4db7af130703ad4eb741", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e9357d5-3f", "ovs_interfaceid": "9e9357d5-3f89-498f-a9ef-f9644bc86de8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2282.967861] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Releasing lock "refresh_cache-26903491-00c6-4726-b2a6-4d1f482d8785" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2282.968158] env[68040]: DEBUG nova.compute.manager [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Instance network_info: |[{"id": "9e9357d5-3f89-498f-a9ef-f9644bc86de8", "address": "fa:16:3e:6e:ad:85", "network": {"id": "9839fe48-68c5-4649-bd83-6b4d9c6008e8", "bridge": "br-int", "label": "tempest-ServersTestJSON-1965746643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dbf578e94a4db7af130703ad4eb741", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e9357d5-3f", "ovs_interfaceid": "9e9357d5-3f89-498f-a9ef-f9644bc86de8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}}
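Editor's note: the network_info blob above is what the VMware driver condenses into the "Instance VIF info" record that follows. An illustrative mapping showing which keys feed which fields; this is a simplified stand-in for the driver's VIF handling, with the field values taken from the log:

    def vif_info_from_network_info(network_info):
        out = []
        for vif in network_info:
            details = vif["details"]
            out.append({
                "network_name": vif["network"]["bridge"],   # 'br-int'
                "mac_address": vif["address"],              # 'fa:16:3e:6e:ad:85'
                "network_ref": {
                    "type": "OpaqueNetwork",
                    # the NSX logical switch backing the OVS port
                    "network-id": details["nsx-logical-switch-id"],
                    "network-type": "nsx.LogicalSwitch",
                    "use-external-id": True,
                },
                "iface_id": vif["id"],
                "vif_model": "vmxnet3",  # from the image's hw properties
            })
        return out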
[ 2282.968577] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:ad:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4b033f4d-2e92-4702-add6-410a29d3f251', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e9357d5-3f89-498f-a9ef-f9644bc86de8', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}}
[ 2282.976039] env[68040]: DEBUG oslo.service.loopingcall [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}}
[ 2282.976496] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}}
[ 2282.976738] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-811a92e0-4463-4562-92ae-c8be6eb0b5cb {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2282.996948] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){
[ 2282.996948] env[68040]: value = "task-3200361"
[ 2282.996948] env[68040]: _type = "Task"
[ 2282.996948] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2283.004666] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200361, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2283.508348] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200361, 'name': CreateVM_Task, 'duration_secs': 0.293136} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 2283.508623] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}}
[ 2283.509276] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2283.509444] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2283.509760] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}}
[ 2283.510030] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04e97d7c-867c-40e3-b7ee-f73314f21fec {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2283.514400] env[68040]: DEBUG oslo_vmware.api [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for the task: (returnval){
[ 2283.514400] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52175a2f-12ef-b643-1347-a72d40b479b7"
[ 2283.514400] env[68040]: _type = "Task"
[ 2283.514400] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 2283.521836] env[68040]: DEBUG oslo_vmware.api [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52175a2f-12ef-b643-1347-a72d40b479b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 2284.025317] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2284.025679] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}}
[ 2284.025789] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2284.682855] env[68040]: DEBUG nova.compute.manager [req-6321c15d-f739-4d3b-bb49-3fed9a155257 req-d5a89829-edad-4906-a45e-cabec6288304 service nova] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Received event network-changed-9e9357d5-3f89-498f-a9ef-f9644bc86de8 {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}}
[ 2284.683121] env[68040]: DEBUG nova.compute.manager [req-6321c15d-f739-4d3b-bb49-3fed9a155257 req-d5a89829-edad-4906-a45e-cabec6288304 service nova] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Refreshing instance network info cache due to event network-changed-9e9357d5-3f89-498f-a9ef-f9644bc86de8. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}}
[ 2284.683342] env[68040]: DEBUG oslo_concurrency.lockutils [req-6321c15d-f739-4d3b-bb49-3fed9a155257 req-d5a89829-edad-4906-a45e-cabec6288304 service nova] Acquiring lock "refresh_cache-26903491-00c6-4726-b2a6-4d1f482d8785" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 2284.683486] env[68040]: DEBUG oslo_concurrency.lockutils [req-6321c15d-f739-4d3b-bb49-3fed9a155257 req-d5a89829-edad-4906-a45e-cabec6288304 service nova] Acquired lock "refresh_cache-26903491-00c6-4726-b2a6-4d1f482d8785" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 2284.683655] env[68040]: DEBUG nova.network.neutron [req-6321c15d-f739-4d3b-bb49-3fed9a155257 req-d5a89829-edad-4906-a45e-cabec6288304 service nova] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Refreshing network info cache for port 9e9357d5-3f89-498f-a9ef-f9644bc86de8 {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}}
[ 2284.925759] env[68040]: DEBUG nova.network.neutron [req-6321c15d-f739-4d3b-bb49-3fed9a155257 req-d5a89829-edad-4906-a45e-cabec6288304 service nova] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Updated VIF entry in instance network info cache for port 9e9357d5-3f89-498f-a9ef-f9644bc86de8. {{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}}
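Editor's note: the "[datastore2] devstack-image-cache_base/..." locks above serialize concurrent fetches of the same image: every request derives the same lock name from the datastore and image id, so one worker populates the cache while the rest wait and then reuse the cached VMDK. A sketch of the idea with assumed helper names; the cache directory string matches this deployment's log:

    from oslo_concurrency import lockutils

    def cached_image_lock_name(datastore, image_id):
        # e.g. "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e"
        return "[%s] devstack-image-cache_base/%s" % (datastore, image_id)

    def fetch_if_missing(datastore, image_id, fetch_fn):
        with lockutils.lock(cached_image_lock_name(datastore, image_id)):
            fetch_fn()  # only one concurrent fetch per (datastore, image)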
[ 2284.926131] env[68040]: DEBUG nova.network.neutron [req-6321c15d-f739-4d3b-bb49-3fed9a155257 req-d5a89829-edad-4906-a45e-cabec6288304 service nova] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Updating instance_info_cache with network_info: [{"id": "9e9357d5-3f89-498f-a9ef-f9644bc86de8", "address": "fa:16:3e:6e:ad:85", "network": {"id": "9839fe48-68c5-4649-bd83-6b4d9c6008e8", "bridge": "br-int", "label": "tempest-ServersTestJSON-1965746643-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dbf578e94a4db7af130703ad4eb741", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e9357d5-3f", "ovs_interfaceid": "9e9357d5-3f89-498f-a9ef-f9644bc86de8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2284.935566] env[68040]: DEBUG oslo_concurrency.lockutils [req-6321c15d-f739-4d3b-bb49-3fed9a155257 req-d5a89829-edad-4906-a45e-cabec6288304 service nova] Releasing lock "refresh_cache-26903491-00c6-4726-b2a6-4d1f482d8785" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 2291.983613] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2299.983482] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2302.983991] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2302.984393] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}}
[ 2302.984393] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}}
[ 2303.000225] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2303.000385] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2303.000525] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2303.000653] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}}
[ 2303.000790] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}
[ 2306.984104] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2306.984517] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2306.984517] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2307.983644] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2307.983918] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
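Editor's note: the "Running periodic task ComputeManager._poll_*" records are emitted by oslo.service's periodic task runner. A minimal sketch of the pattern; the class and method names are illustrative:

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=10)
        def _poll_unconfirmed_resizes(self, context):
            # a task can also bail out early when its interval is disabled,
            # like "CONF.reclaim_instance_interval <= 0, skipping..." above
            pass

    # The service loop invokes manager.run_periodic_tasks(context)
    # on a timer, producing one "Running periodic task ..." record per call.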
[ 2308.984261] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2308.997252] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2308.997496] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2308.997671] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2308.997998] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 2308.999372] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c817f1-e832-469d-a94c-12aad024d1ba {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2309.008062] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8fcaaf9-b478-41a2-a02b-37f4b0d961b3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2309.022819] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21b6ac93-3352-4cf9-b935-9071b06586be {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2309.029217] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7205b815-4754-470e-9af9-737939eb10af {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2309.059333] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180991MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 2309.059528] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2309.059761] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2309.114486] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 92b0f3c0-2c87-478d-8b11-f0b05aee12ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2309.114656] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e9994aad-8053-4936-ad4b-5347a1a62f4e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2309.114787] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d0134198-5d43-47de-a1fc-490cca429e55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2309.114912] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 26903491-00c6-4726-b2a6-4d1f482d8785 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2309.115114] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 2309.115264] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1024MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 2309.180953] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a2430d8-1ab3-4464-ab4f-a2a6d8987159 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2309.188677] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50f735d2-f447-49da-8013-f8ade6427d72 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2309.218850] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a955e4ec-c1a1-478b-b810-addc37102c39 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2309.226341] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5365afd2-405a-4f5a-a531-fcf790c37680 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2309.241579] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2309.250383] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2309.264448] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 2309.264678] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.205s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2313.259920] env[68040]: DEBUG oslo_service.periodic_task [None
req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2320.978923] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2329.497427] env[68040]: WARNING oslo_vmware.rw_handles [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2329.497427] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2329.497427] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2329.497427] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2329.497427] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2329.497427] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 2329.497427] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2329.497427] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2329.497427] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2329.497427] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2329.497427] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2329.497427] env[68040]: ERROR oslo_vmware.rw_handles [ 2329.498383] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/c17f2fcd-bb86-43ec-8001-92cc49a157ff/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2329.499701] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2329.499951] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Copying Virtual Disk [datastore2] vmware_temp/c17f2fcd-bb86-43ec-8001-92cc49a157ff/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/c17f2fcd-bb86-43ec-8001-92cc49a157ff/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2329.500260] env[68040]: DEBUG oslo_vmware.service [-] Invoking 
VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd30716a-f70e-40ee-9ec7-95735f535dc8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.507161] env[68040]: DEBUG oslo_vmware.api [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for the task: (returnval){ [ 2329.507161] env[68040]: value = "task-3200362" [ 2329.507161] env[68040]: _type = "Task" [ 2329.507161] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2329.514674] env[68040]: DEBUG oslo_vmware.api [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Task: {'id': task-3200362, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2330.019021] env[68040]: DEBUG oslo_vmware.exceptions [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Fault InvalidArgument not matched. {{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2330.019021] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2330.019021] env[68040]: ERROR nova.compute.manager [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2330.019021] env[68040]: Faults: ['InvalidArgument'] [ 2330.019021] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Traceback (most recent call last): [ 2330.019021] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2330.019021] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] yield resources [ 2330.019021] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2330.019021] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] self.driver.spawn(context, instance, image_meta, [ 2330.019495] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2330.019495] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2330.019495] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2330.019495] env[68040]: ERROR nova.compute.manager 
[instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] self._fetch_image_if_missing(context, vi) [ 2330.019495] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2330.019495] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] image_cache(vi, tmp_image_ds_loc) [ 2330.019495] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2330.019495] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] vm_util.copy_virtual_disk( [ 2330.019495] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2330.019495] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] session._wait_for_task(vmdk_copy_task) [ 2330.019495] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2330.019495] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] return self.wait_for_task(task_ref) [ 2330.019495] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2330.019880] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] return evt.wait() [ 2330.019880] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2330.019880] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] result = hub.switch() [ 2330.019880] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2330.019880] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] return self.greenlet.switch() [ 2330.019880] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2330.019880] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] self.f(*self.args, **self.kw) [ 2330.019880] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2330.019880] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] raise exceptions.translate_fault(task_info.error) [ 2330.019880] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2330.019880] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Faults: ['InvalidArgument'] [ 2330.019880] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] [ 2330.020266] env[68040]: INFO 
nova.compute.manager [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Terminating instance [ 2330.020824] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2330.021047] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2330.021294] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4142036c-f62c-4e84-929b-7cc1abe727ea {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.023409] env[68040]: DEBUG nova.compute.manager [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2330.023606] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2330.024337] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd772d9-4f03-431b-8bed-c4a29d28edf7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.031019] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2330.031308] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2118b0f4-9d79-4531-ae1b-5b14c3324058 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.033511] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2330.033689] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2330.034660] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c360e4d-e3ca-41e2-9c0c-284df5b10da0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.039010] env[68040]: DEBUG oslo_vmware.api [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for the task: (returnval){ [ 2330.039010] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]523b7837-8f48-3d9c-96de-aa1b6cdffa23" [ 2330.039010] env[68040]: _type = "Task" [ 2330.039010] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2330.096759] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2330.096964] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2330.097167] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Deleting the datastore file [datastore2] 92b0f3c0-2c87-478d-8b11-f0b05aee12ed {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2330.097474] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-afb22660-548e-47a4-a5f4-2faa687e54b0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.103522] env[68040]: DEBUG oslo_vmware.api [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for the task: (returnval){ [ 2330.103522] env[68040]: value = "task-3200364" [ 2330.103522] env[68040]: _type = "Task" [ 2330.103522] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2330.110893] env[68040]: DEBUG oslo_vmware.api [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Task: {'id': task-3200364, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2330.549251] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2330.549628] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Creating directory with path [datastore2] vmware_temp/899b0c24-2f34-45da-b5e6-a011ca1439e1/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2330.549723] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d6aa75f-9a4d-455b-ba96-202cd6d89640 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.560205] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Created directory with path [datastore2] vmware_temp/899b0c24-2f34-45da-b5e6-a011ca1439e1/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2330.560409] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Fetch image to [datastore2] vmware_temp/899b0c24-2f34-45da-b5e6-a011ca1439e1/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2330.560608] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/899b0c24-2f34-45da-b5e6-a011ca1439e1/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2330.561327] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b43e263-90c7-4660-bc86-a1ff8786009b {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.567462] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f96ac564-1fa0-4c29-addd-db1697ded249 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.576011] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a5eef68-1120-4ce9-9d8c-3257e0488a32 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.610214] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46d035e6-0982-4137-bdc2-f2473a892b80 
{{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.616975] env[68040]: DEBUG oslo_vmware.api [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Task: {'id': task-3200364, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074877} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2330.618378] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2330.618575] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2330.618747] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2330.618927] env[68040]: INFO nova.compute.manager [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Took 0.60 seconds to destroy the instance on the hypervisor. 
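[editor's note] The entries above show the polling pattern this driver uses for every vCenter operation that returns a Task managed object (CopyVirtualDisk_Task, UnregisterVM, DeleteDatastoreFile_Task): invoke the method, then poll the task's info until it reaches a terminal state, raising on error. A minimal illustrative sketch of that loop follows; it assumes a hypothetical get_task_info() callable and is not oslo.vmware's actual session API:

    import time

    class TaskFailed(Exception):
        """Raised when a polled task finishes in the 'error' state."""

    def wait_for_task(get_task_info, poll_interval=0.5):
        # Keep polling while the task is queued or running, mirroring the
        # "Task: {...} progress is 0%." entries in the log above.
        while True:
            info = get_task_info()  # hypothetical accessor returning a dict
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                # oslo.vmware translates the fault at this point; see the
                # sketch at the end of this section.
                raise TaskFailed(info.get('error', 'unknown fault'))
            time.sleep(poll_interval)

    # e.g. wait_for_task(lambda: {'state': 'success', 'result': None})

In the log, task-3200364 (DeleteDatastoreFile_Task) reaches 'success' in about 0.07s, while the earlier CopyVirtualDisk_Task ends in the 'error' state and surfaces as the InvalidArgument fault in the spawn traceback.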
[ 2330.620704] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-42b7bbf9-6b58-46b8-8c5c-8ee466289da6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.622572] env[68040]: DEBUG nova.compute.claims [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2330.622746] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2330.622961] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2330.641977] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2330.694740] env[68040]: DEBUG oslo_vmware.rw_handles [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/899b0c24-2f34-45da-b5e6-a011ca1439e1/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2330.756609] env[68040]: DEBUG oslo_vmware.rw_handles [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2330.756796] env[68040]: DEBUG oslo_vmware.rw_handles [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/899b0c24-2f34-45da-b5e6-a011ca1439e1/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2330.776110] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f21da9b-54cd-4908-81be-cb2b61c7ace8 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.782879] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef49c34-1f7d-4596-a268-b8444033b7d6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.812761] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a09819-4ddc-44f1-922e-ddbb28cdcea9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.819230] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e3b44c-5d7b-44e1-8a01-e796592a8063 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.833070] env[68040]: DEBUG nova.compute.provider_tree [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2330.841623] env[68040]: DEBUG nova.scheduler.client.report [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2330.855600] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.233s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2330.856116] env[68040]: ERROR nova.compute.manager [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2330.856116] env[68040]: Faults: ['InvalidArgument'] [ 2330.856116] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Traceback (most recent call last): [ 2330.856116] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2330.856116] env[68040]: ERROR nova.compute.manager [instance: 
92b0f3c0-2c87-478d-8b11-f0b05aee12ed] self.driver.spawn(context, instance, image_meta, [ 2330.856116] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2330.856116] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2330.856116] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2330.856116] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] self._fetch_image_if_missing(context, vi) [ 2330.856116] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2330.856116] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] image_cache(vi, tmp_image_ds_loc) [ 2330.856116] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2330.856494] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] vm_util.copy_virtual_disk( [ 2330.856494] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2330.856494] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] session._wait_for_task(vmdk_copy_task) [ 2330.856494] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2330.856494] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] return self.wait_for_task(task_ref) [ 2330.856494] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2330.856494] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] return evt.wait() [ 2330.856494] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2330.856494] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] result = hub.switch() [ 2330.856494] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2330.856494] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] return self.greenlet.switch() [ 2330.856494] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2330.856494] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] self.f(*self.args, **self.kw) [ 2330.856900] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2330.856900] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] raise exceptions.translate_fault(task_info.error) [ 2330.856900] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2330.856900] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Faults: ['InvalidArgument'] [ 2330.856900] env[68040]: ERROR nova.compute.manager [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] [ 2330.856900] env[68040]: DEBUG nova.compute.utils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2330.858095] env[68040]: DEBUG nova.compute.manager [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Build of instance 92b0f3c0-2c87-478d-8b11-f0b05aee12ed was re-scheduled: A specified parameter was not correct: fileType [ 2330.858095] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2330.858472] env[68040]: DEBUG nova.compute.manager [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2330.858646] env[68040]: DEBUG nova.compute.manager [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2330.858817] env[68040]: DEBUG nova.compute.manager [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2330.859017] env[68040]: DEBUG nova.network.neutron [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2331.162539] env[68040]: DEBUG nova.network.neutron [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2331.174185] env[68040]: INFO nova.compute.manager [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Took 0.31 seconds to deallocate network for instance. [ 2331.268444] env[68040]: INFO nova.scheduler.client.report [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Deleted allocations for instance 92b0f3c0-2c87-478d-8b11-f0b05aee12ed [ 2331.288919] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a45938e3-e4a2-42fa-8165-42811e0d7eb3 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "92b0f3c0-2c87-478d-8b11-f0b05aee12ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 442.647s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2331.289209] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ccc7789-ba86-465f-a4d5-8145aeca0310 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "92b0f3c0-2c87-478d-8b11-f0b05aee12ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 246.974s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2331.289493] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ccc7789-ba86-465f-a4d5-8145aeca0310 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquiring lock "92b0f3c0-2c87-478d-8b11-f0b05aee12ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2331.289711] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ccc7789-ba86-465f-a4d5-8145aeca0310 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "92b0f3c0-2c87-478d-8b11-f0b05aee12ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
2331.289880] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ccc7789-ba86-465f-a4d5-8145aeca0310 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "92b0f3c0-2c87-478d-8b11-f0b05aee12ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2331.291912] env[68040]: INFO nova.compute.manager [None req-4ccc7789-ba86-465f-a4d5-8145aeca0310 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Terminating instance [ 2331.293642] env[68040]: DEBUG nova.compute.manager [None req-4ccc7789-ba86-465f-a4d5-8145aeca0310 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2331.293853] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4ccc7789-ba86-465f-a4d5-8145aeca0310 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2331.294388] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c044a009-6715-48e6-bd06-d0dd85811c06 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.304300] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169e476b-bcd0-4087-9d67-be6c7e97875f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.328971] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-4ccc7789-ba86-465f-a4d5-8145aeca0310 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 92b0f3c0-2c87-478d-8b11-f0b05aee12ed could not be found. [ 2331.329205] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-4ccc7789-ba86-465f-a4d5-8145aeca0310 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2331.329414] env[68040]: INFO nova.compute.manager [None req-4ccc7789-ba86-465f-a4d5-8145aeca0310 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2331.329669] env[68040]: DEBUG oslo.service.loopingcall [None req-4ccc7789-ba86-465f-a4d5-8145aeca0310 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2331.329902] env[68040]: DEBUG nova.compute.manager [-] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2331.330011] env[68040]: DEBUG nova.network.neutron [-] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2331.371024] env[68040]: DEBUG nova.network.neutron [-] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2331.379894] env[68040]: INFO nova.compute.manager [-] [instance: 92b0f3c0-2c87-478d-8b11-f0b05aee12ed] Took 0.05 seconds to deallocate network for instance. [ 2331.475244] env[68040]: DEBUG oslo_concurrency.lockutils [None req-4ccc7789-ba86-465f-a4d5-8145aeca0310 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Lock "92b0f3c0-2c87-478d-8b11-f0b05aee12ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.186s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2353.984362] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2359.984341] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2362.984341] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2362.984657] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2362.984657] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2362.997855] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2362.998038] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Skipping network cache update for instance because it is Building. 
{{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2362.998180] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2362.998309] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2367.983939] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2367.984200] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2368.984848] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2369.983620] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2369.984182] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 2370.985050] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2370.996928] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2370.997180] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2370.997355] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2370.997512] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2370.998876] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-142ea724-f0be-4171-b6dd-0dbfecc80b9d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.007629] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904cb64a-2742-432c-801a-f7b3e3c0b90e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.021414] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59abc8af-2d63-4fbe-adc6-d0f014c7f907 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.027491] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dde8a02-bcac-415e-8fde-217364676db9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.056026] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180987MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2371.056026] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2371.056026] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2371.104052] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance e9994aad-8053-4936-ad4b-5347a1a62f4e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2371.104275] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance d0134198-5d43-47de-a1fc-490cca429e55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2371.104418] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 26903491-00c6-4726-b2a6-4d1f482d8785 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2371.104598] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2371.104739] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2371.151832] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c20563fb-0cbd-464b-96da-a1559c2d2c03 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.160238] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f828218-3f36-4b6e-b6a2-b614e30e9c16 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.188538] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2005d1a-75d9-4394-b996-5e183a3dac47 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.194950] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ee725d-9f2a-4a9f-99b8-05469b549b8c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.208047] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2371.216416] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2371.232614] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2371.232802] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.177s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2374.227163] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2378.854669] env[68040]: WARNING oslo_vmware.rw_handles [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2378.854669] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2378.854669] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2378.854669] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2378.854669] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2378.854669] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 2378.854669] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2378.854669] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2378.854669] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2378.854669] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2378.854669] env[68040]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2378.854669] env[68040]: ERROR oslo_vmware.rw_handles [ 2378.855205] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to 
vmware_temp/899b0c24-2f34-45da-b5e6-a011ca1439e1/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2378.857241] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2378.857601] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Copying Virtual Disk [datastore2] vmware_temp/899b0c24-2f34-45da-b5e6-a011ca1439e1/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/899b0c24-2f34-45da-b5e6-a011ca1439e1/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2378.857897] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-00be378a-3c26-41b4-93a0-6b5f86a3a354 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.865902] env[68040]: DEBUG oslo_vmware.api [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for the task: (returnval){ [ 2378.865902] env[68040]: value = "task-3200365" [ 2378.865902] env[68040]: _type = "Task" [ 2378.865902] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2378.873306] env[68040]: DEBUG oslo_vmware.api [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Task: {'id': task-3200365, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2379.376235] env[68040]: DEBUG oslo_vmware.exceptions [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Fault InvalidArgument not matched. 
{{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2379.376510] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2379.377161] env[68040]: ERROR nova.compute.manager [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2379.377161] env[68040]: Faults: ['InvalidArgument'] [ 2379.377161] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Traceback (most recent call last): [ 2379.377161] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2379.377161] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] yield resources [ 2379.377161] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2379.377161] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] self.driver.spawn(context, instance, image_meta, [ 2379.377161] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2379.377161] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2379.377161] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2379.377161] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] self._fetch_image_if_missing(context, vi) [ 2379.377161] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2379.377505] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] image_cache(vi, tmp_image_ds_loc) [ 2379.377505] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2379.377505] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] vm_util.copy_virtual_disk( [ 2379.377505] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2379.377505] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] session._wait_for_task(vmdk_copy_task) [ 2379.377505] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2379.377505] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] return self.wait_for_task(task_ref) [ 2379.377505] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2379.377505] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] return evt.wait() [ 2379.377505] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2379.377505] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] result = hub.switch() [ 2379.377505] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2379.377505] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] return self.greenlet.switch() [ 2379.377838] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2379.377838] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] self.f(*self.args, **self.kw) [ 2379.377838] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2379.377838] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] raise exceptions.translate_fault(task_info.error) [ 2379.377838] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2379.377838] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Faults: ['InvalidArgument'] [ 2379.377838] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] [ 2379.377838] env[68040]: INFO nova.compute.manager [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Terminating instance [ 2379.379702] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2379.379702] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2379.379702] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-475b8306-2e83-48f8-938a-dd5f658987b3 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.381575] env[68040]: DEBUG nova.compute.manager [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2379.381763] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2379.382492] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95503fe7-cb9b-4985-92df-b0e341a2397e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.388799] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2379.389008] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c242c392-6512-43b4-b359-5e51df503448 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.391041] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2379.391224] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2379.392153] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b741ec9-37f1-4cc6-a493-ef6eb5f0320d {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.396466] env[68040]: DEBUG oslo_vmware.api [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Waiting for the task: (returnval){ [ 2379.396466] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52757bbc-5980-ecb6-d592-effab2bfecb7" [ 2379.396466] env[68040]: _type = "Task" [ 2379.396466] env[68040]: } to complete. 
{{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2379.404613] env[68040]: DEBUG oslo_vmware.api [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52757bbc-5980-ecb6-d592-effab2bfecb7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2379.460474] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2379.460698] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2379.460882] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Deleting the datastore file [datastore2] e9994aad-8053-4936-ad4b-5347a1a62f4e {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2379.461223] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1983552f-a727-4466-8e7d-c3d28f7c6b51 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.467163] env[68040]: DEBUG oslo_vmware.api [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for the task: (returnval){ [ 2379.467163] env[68040]: value = "task-3200367" [ 2379.467163] env[68040]: _type = "Task" [ 2379.467163] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2379.474530] env[68040]: DEBUG oslo_vmware.api [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Task: {'id': task-3200367, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2379.906858] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2379.907150] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Creating directory with path [datastore2] vmware_temp/1b855abe-6bda-47b7-9f13-41097360eb86/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2379.907402] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9027e966-6905-42ad-9546-3d05d1362c5e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.918969] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Created directory with path [datastore2] vmware_temp/1b855abe-6bda-47b7-9f13-41097360eb86/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2379.919226] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Fetch image to [datastore2] vmware_temp/1b855abe-6bda-47b7-9f13-41097360eb86/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2379.919420] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/1b855abe-6bda-47b7-9f13-41097360eb86/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2379.920188] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a54f7cd6-f7c4-4358-a482-0f6d7ac08851 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.926882] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa0ac80-3160-4ab6-a992-57d2cee0aeae {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.935432] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bad39b-3163-47fd-8c6e-f4383da172f0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.965738] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d660e32a-3b72-4ae5-8ec3-8ffdab46b740 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.976442] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5f9d2438-262c-49e8-97d4-fe7f523f825c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.978044] env[68040]: DEBUG oslo_vmware.api [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Task: {'id': task-3200367, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063527} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2379.978319] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2379.978507] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2379.978678] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2379.978851] env[68040]: INFO nova.compute.manager [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Took 0.60 seconds to destroy the instance on the hypervisor. 
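[editor's note] The records above show the task-polling pattern behind the "Waiting for the task ... to complete", "progress is 0%", and "completed successfully ... duration_secs" lines (task-3200365, task-3200367): the caller blocks while a loop polls the vCenter task and either returns its result or raises the task error as a fault. A minimal, self-contained sketch of that loop for orientation only; TaskInfo, TaskFailed, and read_task_info are illustrative stand-ins, not the oslo.vmware API:

import time
from dataclasses import dataclass
from typing import Callable, Optional

@dataclass
class TaskInfo:
    # Illustrative stand-in for a vCenter task's info block.
    state: str                 # 'running' | 'success' | 'error'
    progress: int = 0
    error: Optional[str] = None

class TaskFailed(Exception):
    pass

def wait_for_task(read_task_info: Callable[[], TaskInfo],
                  interval: float = 0.5) -> TaskInfo:
    # Poll until the task leaves 'running', reporting progress the way
    # the log does ("progress is 0%"); an error state is raised, the way
    # task errors surface as VimFaultException in the records above.
    while True:
        info = read_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise TaskFailed(info.error)
        print(f"Task progress is {info.progress}%.")
        time.sleep(interval)

# Simulate a task that fails the way task-3200365 did:
states = iter([
    TaskInfo('running', 0),
    TaskInfo('error', error="A specified parameter was not correct: fileType"),
])
try:
    wait_for_task(lambda: next(states), interval=0.01)
except TaskFailed as exc:
    print("Fault:", exc)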
[ 2379.980897] env[68040]: DEBUG nova.compute.claims [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2379.981101] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2379.981375] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2379.998816] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2380.052964] env[68040]: DEBUG oslo_vmware.rw_handles [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1b855abe-6bda-47b7-9f13-41097360eb86/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2380.111528] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385480c6-cdbe-474e-875a-84849bcb2ab6 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.116086] env[68040]: DEBUG oslo_vmware.rw_handles [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2380.116265] env[68040]: DEBUG oslo_vmware.rw_handles [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1b855abe-6bda-47b7-9f13-41097360eb86/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2380.119762] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0566f30-3fc9-4b41-a78d-ad3b08b20c12 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.150201] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385cb5e4-3dac-4137-a6af-5fcbe25fc19c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.156952] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24777a3c-181d-430a-b19d-0535972b99cf {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.169374] env[68040]: DEBUG nova.compute.provider_tree [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2380.177521] env[68040]: DEBUG nova.scheduler.client.report [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2380.190345] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.209s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2380.190871] env[68040]: ERROR nova.compute.manager [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2380.190871] env[68040]: Faults: ['InvalidArgument'] [ 2380.190871] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Traceback (most recent call last): [ 2380.190871] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2380.190871] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] self.driver.spawn(context, instance, image_meta, [ 2380.190871] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", 
line 539, in spawn [ 2380.190871] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2380.190871] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2380.190871] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] self._fetch_image_if_missing(context, vi) [ 2380.190871] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2380.190871] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] image_cache(vi, tmp_image_ds_loc) [ 2380.190871] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2380.191196] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] vm_util.copy_virtual_disk( [ 2380.191196] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2380.191196] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] session._wait_for_task(vmdk_copy_task) [ 2380.191196] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2380.191196] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] return self.wait_for_task(task_ref) [ 2380.191196] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2380.191196] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] return evt.wait() [ 2380.191196] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2380.191196] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] result = hub.switch() [ 2380.191196] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2380.191196] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] return self.greenlet.switch() [ 2380.191196] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2380.191196] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] self.f(*self.args, **self.kw) [ 2380.191521] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2380.191521] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] raise exceptions.translate_fault(task_info.error) [ 2380.191521] env[68040]: ERROR 
nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2380.191521] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Faults: ['InvalidArgument'] [ 2380.191521] env[68040]: ERROR nova.compute.manager [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] [ 2380.191642] env[68040]: DEBUG nova.compute.utils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2380.193213] env[68040]: DEBUG nova.compute.manager [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Build of instance e9994aad-8053-4936-ad4b-5347a1a62f4e was re-scheduled: A specified parameter was not correct: fileType [ 2380.193213] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 2380.193578] env[68040]: DEBUG nova.compute.manager [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 2380.193751] env[68040]: DEBUG nova.compute.manager [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 2380.193922] env[68040]: DEBUG nova.compute.manager [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2380.194100] env[68040]: DEBUG nova.network.neutron [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2380.497148] env[68040]: DEBUG nova.network.neutron [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2380.515550] env[68040]: INFO nova.compute.manager [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Took 0.32 seconds to deallocate network for instance. 
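[editor's note] The "Fault InvalidArgument not matched" record earlier is fault classification at work: the fault name carried by the vCenter error is looked up in a registry of specific exception classes, and an unmatched name falls back to a generic exception that keeps the fault list, which is why the tracebacks end in "VimFaultException ... Faults: ['InvalidArgument']". A hedged, self-contained sketch of that lookup pattern; VimFault, FileNotFoundFault, and _FAULT_REGISTRY are illustrative names, not oslo.vmware internals:

class VimFault(Exception):
    # Generic fault carrying the vCenter fault names, like the
    # "VimFaultException ... Faults: ['InvalidArgument']" pairs above.
    def __init__(self, message, fault_list):
        super().__init__(message)
        self.fault_list = fault_list

class FileNotFoundFault(VimFault):
    pass

# Fault names with a dedicated exception class; anything absent here is
# "not matched" and falls back to the generic class.
_FAULT_REGISTRY = {"FileNotFound": FileNotFoundFault}

def translate_fault(fault_name, message):
    cls = _FAULT_REGISTRY.get(fault_name, VimFault)
    return cls(message, [fault_name])

try:
    raise translate_fault("InvalidArgument",
                          "A specified parameter was not correct: fileType")
except VimFault as exc:
    print(exc, exc.fault_list)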
[ 2380.609063] env[68040]: INFO nova.scheduler.client.report [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Deleted allocations for instance e9994aad-8053-4936-ad4b-5347a1a62f4e [ 2380.631351] env[68040]: DEBUG oslo_concurrency.lockutils [None req-a19aebac-e55d-4733-941a-c8da29b0c576 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "e9994aad-8053-4936-ad4b-5347a1a62f4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 392.208s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2380.631638] env[68040]: DEBUG oslo_concurrency.lockutils [None req-2ba91fa1-e2d7-40f1-b5c3-56dc892cae5c tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "e9994aad-8053-4936-ad4b-5347a1a62f4e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 196.083s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2380.631879] env[68040]: DEBUG oslo_concurrency.lockutils [None req-2ba91fa1-e2d7-40f1-b5c3-56dc892cae5c tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "e9994aad-8053-4936-ad4b-5347a1a62f4e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2380.632107] env[68040]: DEBUG oslo_concurrency.lockutils [None req-2ba91fa1-e2d7-40f1-b5c3-56dc892cae5c tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "e9994aad-8053-4936-ad4b-5347a1a62f4e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2380.632284] env[68040]: DEBUG oslo_concurrency.lockutils [None req-2ba91fa1-e2d7-40f1-b5c3-56dc892cae5c tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "e9994aad-8053-4936-ad4b-5347a1a62f4e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2380.634615] env[68040]: INFO nova.compute.manager [None req-2ba91fa1-e2d7-40f1-b5c3-56dc892cae5c tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Terminating instance [ 2380.636405] env[68040]: DEBUG nova.compute.manager [None req-2ba91fa1-e2d7-40f1-b5c3-56dc892cae5c tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Start destroying the instance on the hypervisor. 
{{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2380.636691] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba91fa1-e2d7-40f1-b5c3-56dc892cae5c tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2380.637113] env[68040]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ecb4c79e-b686-4965-9b96-bddbc08015ce {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.646463] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e862621a-2eb8-421c-b9f9-c20a2cdbe9a7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.672938] env[68040]: WARNING nova.virt.vmwareapi.vmops [None req-2ba91fa1-e2d7-40f1-b5c3-56dc892cae5c tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e9994aad-8053-4936-ad4b-5347a1a62f4e could not be found. [ 2380.673029] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-2ba91fa1-e2d7-40f1-b5c3-56dc892cae5c tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2380.673228] env[68040]: INFO nova.compute.manager [None req-2ba91fa1-e2d7-40f1-b5c3-56dc892cae5c tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2380.673470] env[68040]: DEBUG oslo.service.loopingcall [None req-2ba91fa1-e2d7-40f1-b5c3-56dc892cae5c tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2380.673940] env[68040]: DEBUG nova.compute.manager [-] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 2380.674060] env[68040]: DEBUG nova.network.neutron [-] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2380.696618] env[68040]: DEBUG nova.network.neutron [-] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2380.704842] env[68040]: INFO nova.compute.manager [-] [instance: e9994aad-8053-4936-ad4b-5347a1a62f4e] Took 0.03 seconds to deallocate network for instance. 
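[editor's note] The lock records above ('Acquiring lock ... by ...', 'acquired ... :: waited 196.083s', '"released" ... :: held 0.163s') come from a decorator that times how long a caller waited for a named lock and how long it held it. A toy, stdlib-only sketch of that wrapper for illustration; the real calls in this log are oslo_concurrency.lockutils, whose internals may differ:

import functools
import threading
import time

_locks = {}  # one lock per name, like a semaphore registry

def synchronized(name, log=print):
    # Toy version of the wrapper behind the "acquired ... waited" /
    # '"released" ... held' lines above (illustrative, not oslo code).
    lock = _locks.setdefault(name, threading.Lock())
    def wrap(fn):
        @functools.wraps(fn)
        def inner(*args, **kwargs):
            t_wait = time.monotonic()
            with lock:
                log(f'Lock "{name}" acquired by "{fn.__qualname__}" :: '
                    f'waited {time.monotonic() - t_wait:.3f}s')
                t_held = time.monotonic()
                try:
                    return fn(*args, **kwargs)
                finally:
                    log(f'Lock "{name}" "released" by "{fn.__qualname__}" :: '
                        f'held {time.monotonic() - t_held:.3f}s')
        return inner
    return wrap

@synchronized("compute_resources")
def instance_claim():
    time.sleep(0.05)  # stand-in for the claim bookkeeping in the log

instance_claim()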
[ 2380.794557] env[68040]: DEBUG oslo_concurrency.lockutils [None req-2ba91fa1-e2d7-40f1-b5c3-56dc892cae5c tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "e9994aad-8053-4936-ad4b-5347a1a62f4e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 0.163s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2381.087985] env[68040]: DEBUG oslo_concurrency.lockutils [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "953c3724-2053-4b74-b3de-b4f431bbe4ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2381.088266] env[68040]: DEBUG oslo_concurrency.lockutils [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "953c3724-2053-4b74-b3de-b4f431bbe4ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2381.098240] env[68040]: DEBUG nova.compute.manager [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Starting instance... {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 2381.144666] env[68040]: DEBUG oslo_concurrency.lockutils [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2381.144905] env[68040]: DEBUG oslo_concurrency.lockutils [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2381.146258] env[68040]: INFO nova.compute.claims [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2381.236421] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56cee453-281e-46e3-84a3-cfa7505b7737 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.245018] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-120b2498-a987-409f-8e27-5a01534f11fc {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.275714] env[68040]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83543dc7-e191-4629-a987-5afcbb076277 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.283235] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048876d2-c61e-43a2-82d0-92c3de514c5c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.296057] env[68040]: DEBUG nova.compute.provider_tree [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2381.307024] env[68040]: DEBUG nova.scheduler.client.report [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2381.320293] env[68040]: DEBUG oslo_concurrency.lockutils [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.175s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2381.320755] env[68040]: DEBUG nova.compute.manager [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Start building networks asynchronously for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 2381.353836] env[68040]: DEBUG nova.compute.utils [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Using /dev/sd instead of None {{(pid=68040) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2381.355134] env[68040]: DEBUG nova.compute.manager [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Allocating IP information in the background. 
{{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 2381.355312] env[68040]: DEBUG nova.network.neutron [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] allocate_for_instance() {{(pid=68040) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2381.364539] env[68040]: DEBUG nova.compute.manager [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Start building block device mappings for instance. {{(pid=68040) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 2381.415384] env[68040]: DEBUG nova.policy [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd5b784bb2384457e9bcc4e9ff02ea850', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9a2c3ee9bf1c40228a089e4b0e5bff00', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68040) authorize /opt/stack/nova/nova/policy.py:203}} [ 2381.427063] env[68040]: DEBUG nova.compute.manager [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Start spawning the instance on the hypervisor. 
{{(pid=68040) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 2381.452917] env[68040]: DEBUG nova.virt.hardware [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-27T05:59:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-27T05:59:33Z,direct_url=<?>,disk_format='vmdk',id=8c308313-03d5-40b6-a5fe-9037e32dc76e,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='0770d674a39c40089de0aade9440b370',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-27T05:59:34Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2381.453172] env[68040]: DEBUG nova.virt.hardware [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Flavor limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2381.453371] env[68040]: DEBUG nova.virt.hardware [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Image limits 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2381.453582] env[68040]: DEBUG nova.virt.hardware [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Flavor pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2381.453737] env[68040]: DEBUG nova.virt.hardware [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Image pref 0:0:0 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2381.453888] env[68040]: DEBUG nova.virt.hardware [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68040) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2381.454112] env[68040]: DEBUG nova.virt.hardware [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2381.454281] env[68040]: DEBUG nova.virt.hardware [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2381.454450] env[68040]: DEBUG nova.virt.hardware [None 
req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Got 1 possible topologies {{(pid=68040) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2381.454616] env[68040]: DEBUG nova.virt.hardware [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2381.454794] env[68040]: DEBUG nova.virt.hardware [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68040) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2381.455659] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a2da8f-a6c9-488c-a2b4-25094795e00e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.463636] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-584f8827-3532-4d05-9bb2-79a25ca66d9c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2381.719135] env[68040]: DEBUG nova.network.neutron [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Successfully created port: 91652f0f-ec20-4f01-ae87-26d78493190f {{(pid=68040) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2382.285525] env[68040]: DEBUG nova.network.neutron [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Successfully updated port: 91652f0f-ec20-4f01-ae87-26d78493190f {{(pid=68040) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2382.300434] env[68040]: DEBUG oslo_concurrency.lockutils [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "refresh_cache-953c3724-2053-4b74-b3de-b4f431bbe4ec" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2382.300723] env[68040]: DEBUG oslo_concurrency.lockutils [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquired lock "refresh_cache-953c3724-2053-4b74-b3de-b4f431bbe4ec" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2382.301965] env[68040]: DEBUG nova.network.neutron [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Building network info cache for instance {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2382.346719] env[68040]: DEBUG nova.network.neutron [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 
tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Instance cache missing network info. {{(pid=68040) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2382.515782] env[68040]: DEBUG nova.network.neutron [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Updating instance_info_cache with network_info: [{"id": "91652f0f-ec20-4f01-ae87-26d78493190f", "address": "fa:16:3e:3b:ed:12", "network": {"id": "9565e3df-4a40-4611-a5a9-efd2bc66053b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-780365588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a2c3ee9bf1c40228a089e4b0e5bff00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91652f0f-ec", "ovs_interfaceid": "91652f0f-ec20-4f01-ae87-26d78493190f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2382.528695] env[68040]: DEBUG oslo_concurrency.lockutils [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Releasing lock "refresh_cache-953c3724-2053-4b74-b3de-b4f431bbe4ec" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2382.529139] env[68040]: DEBUG nova.compute.manager [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Instance network_info: |[{"id": "91652f0f-ec20-4f01-ae87-26d78493190f", "address": "fa:16:3e:3b:ed:12", "network": {"id": "9565e3df-4a40-4611-a5a9-efd2bc66053b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-780365588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a2c3ee9bf1c40228a089e4b0e5bff00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91652f0f-ec", "ovs_interfaceid": "91652f0f-ec20-4f01-ae87-26d78493190f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68040) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 2382.529429] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:ed:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91652f0f-ec20-4f01-ae87-26d78493190f', 'vif_model': 'vmxnet3'}] {{(pid=68040) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2382.536931] env[68040]: DEBUG oslo.service.loopingcall [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68040) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2382.538182] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Creating VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2382.539640] env[68040]: DEBUG nova.compute.manager [req-d30504ff-a1ba-44d3-9542-84658ce1d064 req-93fb92a3-c673-4505-850a-214e2caae56f service nova] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Received event network-vif-plugged-91652f0f-ec20-4f01-ae87-26d78493190f {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2382.539836] env[68040]: DEBUG oslo_concurrency.lockutils [req-d30504ff-a1ba-44d3-9542-84658ce1d064 req-93fb92a3-c673-4505-850a-214e2caae56f service nova] Acquiring lock "953c3724-2053-4b74-b3de-b4f431bbe4ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2382.540058] env[68040]: DEBUG oslo_concurrency.lockutils [req-d30504ff-a1ba-44d3-9542-84658ce1d064 req-93fb92a3-c673-4505-850a-214e2caae56f service nova] Lock "953c3724-2053-4b74-b3de-b4f431bbe4ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2382.540260] env[68040]: DEBUG oslo_concurrency.lockutils [req-d30504ff-a1ba-44d3-9542-84658ce1d064 req-93fb92a3-c673-4505-850a-214e2caae56f service nova] Lock "953c3724-2053-4b74-b3de-b4f431bbe4ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2382.540425] env[68040]: DEBUG nova.compute.manager [req-d30504ff-a1ba-44d3-9542-84658ce1d064 req-93fb92a3-c673-4505-850a-214e2caae56f service nova] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] No waiting events found dispatching network-vif-plugged-91652f0f-ec20-4f01-ae87-26d78493190f {{(pid=68040) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2382.540594] env[68040]: WARNING nova.compute.manager [req-d30504ff-a1ba-44d3-9542-84658ce1d064 req-93fb92a3-c673-4505-850a-214e2caae56f service nova] [instance: 
953c3724-2053-4b74-b3de-b4f431bbe4ec] Received unexpected event network-vif-plugged-91652f0f-ec20-4f01-ae87-26d78493190f for instance with vm_state building and task_state spawning. [ 2382.540757] env[68040]: DEBUG nova.compute.manager [req-d30504ff-a1ba-44d3-9542-84658ce1d064 req-93fb92a3-c673-4505-850a-214e2caae56f service nova] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Received event network-changed-91652f0f-ec20-4f01-ae87-26d78493190f {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 2382.540910] env[68040]: DEBUG nova.compute.manager [req-d30504ff-a1ba-44d3-9542-84658ce1d064 req-93fb92a3-c673-4505-850a-214e2caae56f service nova] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Refreshing instance network info cache due to event network-changed-91652f0f-ec20-4f01-ae87-26d78493190f. {{(pid=68040) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 2382.541109] env[68040]: DEBUG oslo_concurrency.lockutils [req-d30504ff-a1ba-44d3-9542-84658ce1d064 req-93fb92a3-c673-4505-850a-214e2caae56f service nova] Acquiring lock "refresh_cache-953c3724-2053-4b74-b3de-b4f431bbe4ec" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2382.541267] env[68040]: DEBUG oslo_concurrency.lockutils [req-d30504ff-a1ba-44d3-9542-84658ce1d064 req-93fb92a3-c673-4505-850a-214e2caae56f service nova] Acquired lock "refresh_cache-953c3724-2053-4b74-b3de-b4f431bbe4ec" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2382.541436] env[68040]: DEBUG nova.network.neutron [req-d30504ff-a1ba-44d3-9542-84658ce1d064 req-93fb92a3-c673-4505-850a-214e2caae56f service nova] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Refreshing network info cache for port 91652f0f-ec20-4f01-ae87-26d78493190f {{(pid=68040) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2382.542416] env[68040]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f8e67e8-3920-4b7f-a4c6-3b0e8faf2b3c {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.565269] env[68040]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2382.565269] env[68040]: value = "task-3200368" [ 2382.565269] env[68040]: _type = "Task" [ 2382.565269] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2382.573012] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200368, 'name': CreateVM_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2382.800090] env[68040]: DEBUG nova.network.neutron [req-d30504ff-a1ba-44d3-9542-84658ce1d064 req-93fb92a3-c673-4505-850a-214e2caae56f service nova] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Updated VIF entry in instance network info cache for port 91652f0f-ec20-4f01-ae87-26d78493190f. 
{{(pid=68040) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2382.800507] env[68040]: DEBUG nova.network.neutron [req-d30504ff-a1ba-44d3-9542-84658ce1d064 req-93fb92a3-c673-4505-850a-214e2caae56f service nova] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Updating instance_info_cache with network_info: [{"id": "91652f0f-ec20-4f01-ae87-26d78493190f", "address": "fa:16:3e:3b:ed:12", "network": {"id": "9565e3df-4a40-4611-a5a9-efd2bc66053b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-780365588-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9a2c3ee9bf1c40228a089e4b0e5bff00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91652f0f-ec", "ovs_interfaceid": "91652f0f-ec20-4f01-ae87-26d78493190f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2382.811439] env[68040]: DEBUG oslo_concurrency.lockutils [req-d30504ff-a1ba-44d3-9542-84658ce1d064 req-93fb92a3-c673-4505-850a-214e2caae56f service nova] Releasing lock "refresh_cache-953c3724-2053-4b74-b3de-b4f431bbe4ec" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2383.076072] env[68040]: DEBUG oslo_vmware.api [-] Task: {'id': task-3200368, 'name': CreateVM_Task, 'duration_secs': 0.290218} completed successfully. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2383.076257] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Created VM on the ESX host {{(pid=68040) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2383.076903] env[68040]: DEBUG oslo_concurrency.lockutils [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2383.077081] env[68040]: DEBUG oslo_concurrency.lockutils [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2383.077418] env[68040]: DEBUG oslo_concurrency.lockutils [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2383.077654] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b56dc7cc-7e2a-4871-bc00-db45bc502c02 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2383.081825] env[68040]: DEBUG oslo_vmware.api [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Waiting for the task: (returnval){ [ 2383.081825] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]527e4ffd-ac7b-122a-a0eb-3f1340272732" [ 2383.081825] env[68040]: _type = "Task" [ 2383.081825] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2383.088852] env[68040]: DEBUG oslo_vmware.api [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]527e4ffd-ac7b-122a-a0eb-3f1340272732, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2383.592455] env[68040]: DEBUG oslo_concurrency.lockutils [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2383.592750] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Processing image 8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2383.592941] env[68040]: DEBUG oslo_concurrency.lockutils [None req-649fbc50-7a18-446e-846a-7cac69941214 tempest-DeleteServersTestJSON-1950766552 tempest-DeleteServersTestJSON-1950766552-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2411.161056] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2411.161501] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Getting list of instances from cluster (obj){ [ 2411.161501] env[68040]: value = "domain-c8" [ 2411.161501] env[68040]: _type = "ClusterComputeResource" [ 2411.161501] env[68040]: } {{(pid=68040) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2411.162594] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff47fb3e-08f3-4424-a33a-d8a9557c8d22 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.173854] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Got total of 3 instances {{(pid=68040) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2416.013550] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2419.983981] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2424.984662] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2424.985107] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Starting heal 
instance info cache {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 2424.985107] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Rebuilding the list of instances to heal {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 2425.000576] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2425.000719] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2425.000850] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] [instance: 953c3724-2053-4b74-b3de-b4f431bbe4ec] Skipping network cache update for instance because it is Building. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 2425.000979] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Didn't find any instances for network info cache update. {{(pid=68040) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 2427.984472] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2427.984880] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2428.874018] env[68040]: WARNING oslo_vmware.rw_handles [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2428.874018] env[68040]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2428.874018] env[68040]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2428.874018] env[68040]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2428.874018] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2428.874018] env[68040]: ERROR oslo_vmware.rw_handles response.begin() [ 2428.874018] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2428.874018] env[68040]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2428.874018] env[68040]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2428.874018] env[68040]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2428.874018] env[68040]: ERROR oslo_vmware.rw_handles 
http.client.RemoteDisconnected: Remote end closed connection without response [ 2428.874018] env[68040]: ERROR oslo_vmware.rw_handles [ 2428.874460] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Downloaded image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to vmware_temp/1b855abe-6bda-47b7-9f13-41097360eb86/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2428.876382] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Caching image {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2428.876622] env[68040]: DEBUG nova.virt.vmwareapi.vm_util [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Copying Virtual Disk [datastore2] vmware_temp/1b855abe-6bda-47b7-9f13-41097360eb86/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk to [datastore2] vmware_temp/1b855abe-6bda-47b7-9f13-41097360eb86/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk {{(pid=68040) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2428.876910] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ec2fd47-692e-492f-ba55-9eaa4026cc1f {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2428.885854] env[68040]: DEBUG oslo_vmware.api [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Waiting for the task: (returnval){ [ 2428.885854] env[68040]: value = "task-3200369" [ 2428.885854] env[68040]: _type = "Task" [ 2428.885854] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2428.894401] env[68040]: DEBUG oslo_vmware.api [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Task: {'id': task-3200369, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2429.397298] env[68040]: DEBUG oslo_vmware.exceptions [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Fault InvalidArgument not matched. 
{{(pid=68040) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2429.397759] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Releasing lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2429.398086] env[68040]: ERROR nova.compute.manager [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2429.398086] env[68040]: Faults: ['InvalidArgument'] [ 2429.398086] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] Traceback (most recent call last): [ 2429.398086] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 2429.398086] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] yield resources [ 2429.398086] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 2429.398086] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] self.driver.spawn(context, instance, image_meta, [ 2429.398086] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2429.398086] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2429.398086] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2429.398086] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] self._fetch_image_if_missing(context, vi) [ 2429.398086] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2429.398646] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] image_cache(vi, tmp_image_ds_loc) [ 2429.398646] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2429.398646] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] vm_util.copy_virtual_disk( [ 2429.398646] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2429.398646] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] session._wait_for_task(vmdk_copy_task) [ 2429.398646] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2429.398646] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] return self.wait_for_task(task_ref) [ 2429.398646] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2429.398646] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] return evt.wait() [ 2429.398646] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2429.398646] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] result = hub.switch() [ 2429.398646] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2429.398646] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] return self.greenlet.switch() [ 2429.399390] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2429.399390] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] self.f(*self.args, **self.kw) [ 2429.399390] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2429.399390] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] raise exceptions.translate_fault(task_info.error) [ 2429.399390] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2429.399390] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] Faults: ['InvalidArgument'] [ 2429.399390] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] [ 2429.399390] env[68040]: INFO nova.compute.manager [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Terminating instance [ 2429.400044] env[68040]: DEBUG oslo_concurrency.lockutils [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Acquired lock "[datastore2] devstack-image-cache_base/8c308313-03d5-40b6-a5fe-9037e32dc76e/8c308313-03d5-40b6-a5fe-9037e32dc76e.vmdk" {{(pid=68040) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2429.400260] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2429.400502] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2192c172-692e-472f-8526-10f71567b072 {{(pid=68040) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.402886] env[68040]: DEBUG nova.compute.manager [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Start destroying the instance on the hypervisor. {{(pid=68040) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 2429.403089] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Destroying instance {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2429.403850] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2dc5b5b-436a-4a65-9538-c2853e7fa1c9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.410979] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Unregistering the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2429.411257] env[68040]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9218caf7-fd30-40a6-b26e-a602f27ee433 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.413639] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2429.413817] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68040) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2429.414814] env[68040]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5025b9d-7f5d-4a9a-8eb1-b0830e050b73 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.419597] env[68040]: DEBUG oslo_vmware.api [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Waiting for the task: (returnval){ [ 2429.419597] env[68040]: value = "session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52dd584d-dcb5-a731-7bbb-fe41ef84b39b" [ 2429.419597] env[68040]: _type = "Task" [ 2429.419597] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2429.428327] env[68040]: DEBUG oslo_vmware.api [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Task: {'id': session[52120ef6-6aeb-6656-1184-5c67aef0e51c]52dd584d-dcb5-a731-7bbb-fe41ef84b39b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2429.485735] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Unregistered the VM {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2429.486032] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Deleting contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2429.486296] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Deleting the datastore file [datastore2] d0134198-5d43-47de-a1fc-490cca429e55 {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2429.486606] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64a28e2f-514e-43c5-9f0d-daca169d9b72 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.492502] env[68040]: DEBUG oslo_vmware.api [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Waiting for the task: (returnval){ [ 2429.492502] env[68040]: value = "task-3200371" [ 2429.492502] env[68040]: _type = "Task" [ 2429.492502] env[68040]: } to complete. {{(pid=68040) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2429.500559] env[68040]: DEBUG oslo_vmware.api [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Task: {'id': task-3200371, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2429.930101] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Preparing fetch location {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2429.930354] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Creating directory with path [datastore2] vmware_temp/15ea08a8-679c-4e7c-a749-ab348fce6ece/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2429.930582] env[68040]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fd62df05-4021-43e7-aaf5-37346b9f4610 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.941367] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Created directory with path [datastore2] vmware_temp/15ea08a8-679c-4e7c-a749-ab348fce6ece/8c308313-03d5-40b6-a5fe-9037e32dc76e {{(pid=68040) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2429.941548] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Fetch image to [datastore2] vmware_temp/15ea08a8-679c-4e7c-a749-ab348fce6ece/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk {{(pid=68040) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2429.941721] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to [datastore2] vmware_temp/15ea08a8-679c-4e7c-a749-ab348fce6ece/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk on the data store datastore2 {{(pid=68040) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2429.942455] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97843b7-d946-435a-8843-d1ecb65fc4ea {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.948853] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87512a80-53ef-4868-938e-7f36b9f111c0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.957651] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe10f58-6c41-48e4-b31c-f1310a60c8a9 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.987273] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2b5b5f-85df-4c82-b9e1-ad5b82ef8d85 {{(pid=68040) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.992772] env[68040]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-db6a4aa7-58e5-40bc-b4e5-07a64e1c6282 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2430.001772] env[68040]: DEBUG oslo_vmware.api [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Task: {'id': task-3200371, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070623} completed successfully. {{(pid=68040) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2430.002074] env[68040]: DEBUG nova.virt.vmwareapi.ds_util [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Deleted the datastore file {{(pid=68040) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2430.002278] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Deleted contents of the VM from datastore datastore2 {{(pid=68040) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2430.002460] env[68040]: DEBUG nova.virt.vmwareapi.vmops [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Instance destroyed {{(pid=68040) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2430.002644] env[68040]: INFO nova.compute.manager [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2430.004791] env[68040]: DEBUG nova.compute.claims [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Aborting claim: {{(pid=68040) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2430.004959] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2430.005203] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2430.017292] env[68040]: DEBUG nova.virt.vmwareapi.images [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] [instance: 26903491-00c6-4726-b2a6-4d1f482d8785] Downloading image file data 8c308313-03d5-40b6-a5fe-9037e32dc76e to the data store datastore2 {{(pid=68040) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2430.073830] env[68040]: DEBUG oslo_vmware.rw_handles [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/15ea08a8-679c-4e7c-a749-ab348fce6ece/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68040) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2430.134897] env[68040]: DEBUG oslo_vmware.rw_handles [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Completed reading data from the image iterator. {{(pid=68040) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2430.135106] env[68040]: DEBUG oslo_vmware.rw_handles [None req-ed34d362-9040-4f52-88a1-b0d59fb67fd7 tempest-ServersTestJSON-1941559996 tempest-ServersTestJSON-1941559996-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/15ea08a8-679c-4e7c-a749-ab348fce6ece/8c308313-03d5-40b6-a5fe-9037e32dc76e/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=68040) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}}
[ 2430.168343] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-194b72f2-89e8-483d-a7df-81bf361fbbe1 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2430.175958] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd678819-5b52-49ce-84c4-63b1054e7ad0 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2430.204971] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d432535f-ce6d-4464-8cd7-6026120e1166 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2430.211769] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42d38485-06af-409a-92b0-adfda123b34e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2430.224449] env[68040]: DEBUG nova.compute.provider_tree [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2430.233078] env[68040]: DEBUG nova.scheduler.client.report [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2430.246967] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.242s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2430.247512] env[68040]: ERROR nova.compute.manager [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2430.247512] env[68040]: Faults: ['InvalidArgument']
[ 2430.247512] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] Traceback (most recent call last):
[ 2430.247512] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 2430.247512] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] self.driver.spawn(context, instance, image_meta,
[ 2430.247512] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 2430.247512] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 2430.247512] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 2430.247512] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] self._fetch_image_if_missing(context, vi)
[ 2430.247512] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 2430.247512] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] image_cache(vi, tmp_image_ds_loc)
[ 2430.247512] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 2430.247850] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] vm_util.copy_virtual_disk(
[ 2430.247850] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 2430.247850] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] session._wait_for_task(vmdk_copy_task)
[ 2430.247850] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 2430.247850] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] return self.wait_for_task(task_ref)
[ 2430.247850] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 2430.247850] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] return evt.wait()
[ 2430.247850] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 2430.247850] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] result = hub.switch()
[ 2430.247850] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 2430.247850] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] return self.greenlet.switch()
[ 2430.247850] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 2430.247850] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] self.f(*self.args, **self.kw)
[ 2430.248199] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 2430.248199] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] raise exceptions.translate_fault(task_info.error)
[ 2430.248199] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 2430.248199] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55] Faults: ['InvalidArgument']
[ 2430.248199] env[68040]: ERROR nova.compute.manager [instance: d0134198-5d43-47de-a1fc-490cca429e55]
[ 2430.248329] env[68040]: DEBUG nova.compute.utils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] VimFaultException {{(pid=68040) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 2430.249990] env[68040]: DEBUG nova.compute.manager [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Build of instance d0134198-5d43-47de-a1fc-490cca429e55 was re-scheduled: A specified parameter was not correct: fileType
[ 2430.249990] env[68040]: Faults: ['InvalidArgument'] {{(pid=68040) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 2430.250381] env[68040]: DEBUG nova.compute.manager [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Unplugging VIFs for instance {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 2430.250557] env[68040]: DEBUG nova.compute.manager [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68040) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 2430.250730] env[68040]: DEBUG nova.compute.manager [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Deallocating network for instance {{(pid=68040) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 2430.250922] env[68040]: DEBUG nova.network.neutron [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] deallocate_for_instance() {{(pid=68040) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 2430.559352] env[68040]: DEBUG nova.network.neutron [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Updating instance_info_cache with network_info: [] {{(pid=68040) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 2430.571703] env[68040]: INFO nova.compute.manager [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] [instance: d0134198-5d43-47de-a1fc-490cca429e55] Took 0.32 seconds to deallocate network for instance.
[ 2430.680580] env[68040]: INFO nova.scheduler.client.report [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Deleted allocations for instance d0134198-5d43-47de-a1fc-490cca429e55
[ 2430.704894] env[68040]: DEBUG oslo_concurrency.lockutils [None req-12859508-7ea7-4f44-a0d6-6a322620be05 tempest-AttachVolumeShelveTestJSON-1912427195 tempest-AttachVolumeShelveTestJSON-1912427195-project-member] Lock "d0134198-5d43-47de-a1fc-490cca429e55" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 173.143s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2430.983943] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2430.983943] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2430.983943] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Cleaning up deleted instances with incomplete migration {{(pid=68040) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}}
[ 2431.993789] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2431.994190] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68040) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
[ 2431.994190] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2431.994335] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Cleaning up deleted instances {{(pid=68040) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}}
[ 2432.005911] env[68040]: DEBUG nova.compute.manager [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] There are 0 instances to clean {{(pid=68040) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}}
[ 2432.997022] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2433.008972] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2433.009219] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2433.009393] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2433.009555] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68040) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 2433.010711] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1edc516-a472-4f7c-a659-9ff6f2da4029 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2433.021158] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0915b9ae-33e1-483b-9061-ed1cf289e749 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2433.037403] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1892410e-0bc9-4326-9ebf-c24ffa0f575a {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2433.041802] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5daa037f-5bc3-4b30-9519-3fa64c160bd7 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2433.074960] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180987MB free_disk=125GB free_vcpus=48 pci_devices=None {{(pid=68040) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 2433.075137] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2433.075351] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2433.126386] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 26903491-00c6-4726-b2a6-4d1f482d8785 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2433.126558] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Instance 953c3724-2053-4b74-b3de-b4f431bbe4ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68040) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}}
[ 2433.126739] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 2433.126884] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=768MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=68040) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 2433.167256] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc7e274-b06e-41de-97c1-a4bb986a4083 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2433.175074] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf71af5-b397-42fa-938e-10d6178d8d31 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2433.206842] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225525ce-37f3-4c0a-9b6b-7bf442fbc539 {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2433.214217] env[68040]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef7ae01c-249d-4383-abe6-1db17b5a969e {{(pid=68040) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2433.227193] env[68040]: DEBUG nova.compute.provider_tree [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed in ProviderTree for provider: 22db6f73-b3da-436a-bf40-9c8c240b2e44 {{(pid=68040) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2433.238500] env[68040]: DEBUG nova.scheduler.client.report [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Inventory has not changed for provider 22db6f73-b3da-436a-bf40-9c8c240b2e44 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 125, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68040) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 2433.253761] env[68040]: DEBUG nova.compute.resource_tracker [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68040) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 2433.253761] env[68040]: DEBUG oslo_concurrency.lockutils [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.178s {{(pid=68040) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2436.237165] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2443.979647] env[68040]: DEBUG oslo_service.periodic_task [None req-df09dbf6-7607-43a6-9833-fb7b116a4625 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68040) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}